Example #1
  private CliSessionState startSessionState() throws IOException {

    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");

    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;
    ss.out = System.out;
    ss.err = System.out;

    SessionState oldSs = SessionState.get();
    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    isSessionStateStarted = true;

    conf.set("hive.execution.engine", execEngine);
    return ss;
  }
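Note that the save-and-restore of hive.execution.engine above is not exception safe: if SessionState.start(ss) throws, the original engine value is never restored. Below is a minimal sketch of the same pattern wrapped in try/finally. It is illustrative only and not taken from the project; the helper class and method names are assumptions.

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

final class MrSessionStarter {
  static CliSessionState startWithMrEngine(HiveConf conf) {
    // Remember the configured engine so it can be restored afterwards.
    String previousEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    try {
      CliSessionState ss = new CliSessionState(conf);
      ss.in = System.in;
      ss.out = System.out;
      ss.err = System.err;
      SessionState.start(ss);
      return ss;
    } finally {
      // Restore the previous value even if start() failed; guard against null,
      // since the key may simply not have been set before.
      if (previousEngine != null) {
        conf.set("hive.execution.engine", previousEngine);
      }
    }
  }
}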
Example #2
  public static void baseSetup() throws Exception {
    MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
    fs = dfs.getFileSystem();
    baseDfsDir = new Path(new Path(fs.getUri()), "/base");
    fs.mkdirs(baseDfsDir);
    warehouseDir = new Path(baseDfsDir, "warehouse");
    fs.mkdirs(warehouseDir);
    conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());

    // Assuming the tests are run on either the C: or D: drive on Windows!
    dataFileDir =
        conf.get("test.data.files")
            .replace('\\', '/')
            .replace("c:", "")
            .replace("C:", "")
            .replace("D:", "")
            .replace("d:", "");
    dataFilePath = new Path(dataFileDir, "kv1.txt");

    // Set up scratch directory
    Path scratchDir = new Path(baseDfsDir, "scratchdir");
    conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());

    // set hive conf vars
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());

    SessionState.start(new CliSessionState(conf));
    driver = new Driver(conf);
    setupDataTable();
  }
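baseSetup() leaves the mini DFS cluster and the started session running. A companion teardown is sketched below; it is not part of the original test and assumes the MiniDFSShim returned by getMiniDfs() is stored in a static field (here called dfsShim) instead of the local variable used above.

  public static void baseTearDown() throws Exception {
    // Close the thread-local session started in baseSetup(), if any.
    SessionState ss = SessionState.get();
    if (ss != null) {
      ss.close();
    }
    // Shut down the mini DFS cluster; assumes baseSetup() kept the shim in a field.
    if (dfsShim != null) {
      dfsShim.shutdown();
    }
  }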
Example #3
  public void generateTestData() throws Exception {

    // remove data from previous runs.
    cleanDir(DB_DIR);
    cleanDir(WH_DIR);

    HiveConf conf = new HiveConf();

    conf.set(
        "javax.jdo.option.ConnectionURL",
        String.format("jdbc:derby:;databaseName=%s;create=true", DB_DIR));
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
    conf.set("hive.metastore.warehouse.dir", WH_DIR);

    SessionState ss = new SessionState(new HiveConf(SessionState.class));
    SessionState.start(ss);
    hiveDriver = new Driver(conf);

    // generate (key, value) test data
    String testDataFile = generateTestDataFile();

    createTableAndLoadData("default", "kv", testDataFile);
    executeQuery("CREATE DATABASE IF NOT EXISTS db1");
    createTableAndLoadData("db1", "kv_db1", testDataFile);

    ss.close();
  }
Example #4
 private void applyAuthorizationConfigPolicy(HiveConf newHiveConf)
     throws HiveException, MetaException {
   // authorization setup using SessionState should be revisited eventually, as
   // authorization and authentication are not session specific settings
   SessionState ss = new SessionState(newHiveConf);
   ss.setIsHiveServerQuery(true);
   SessionState.start(ss);
   ss.applyAuthorizationPolicy();
 }
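For context, here is a hedged sketch of how a caller might prepare the HiveConf before invoking this helper. The specific ConfVars set here are illustrative assumptions, not the project's actual configuration.

   // Illustrative caller (assumption: run once during server initialization).
   HiveConf newHiveConf = new HiveConf(SessionState.class);
   // Enable authorization checks so applyAuthorizationPolicy() has something to apply.
   newHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
   applyAuthorizationConfigPolicy(newHiveConf);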
Example #5
  public static void main(String[] args) throws Exception {

    HiveConf conf = new HiveConf();
    conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));
    conf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
    conf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true);
    SessionState.start(new CliSessionState(conf));
    new CliDriver().processLine(args[0]);
  }
Example #6
 public void init() throws Exception {
   testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
   String execEngine = conf.get("hive.execution.engine");
   conf.set("hive.execution.engine", "mr");
   SessionState.start(conf);
   conf.set("hive.execution.engine", execEngine);
   db = Hive.get(conf);
   pd = new ParseDriver();
   sem = new SemanticAnalyzer(conf);
 }
Example #7
    /** A constructor. */
    public HiveServerHandler() throws MetaException {
      super(HiveServer.class.getName());

      isHiveQuery = false;
      SessionState session = new SessionState(new HiveConf(SessionState.class));
      SessionState.start(session);
      session.in = null;
      session.out = null;
      session.err = null;
      driver = new Driver();
    }
Example #8
  public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
      cleanUp();
      createSources();
    }

    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
    Utilities.clearWorkMap();
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;

    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
      File qf = new File(outDir, tname);
      stdoutName = qf.getName().concat(outFileExtension);
    } else {
      stdoutName = tname + outFileExtension;
    }

    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    if (qSortQuerySet.contains(tname)) {
      ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
      ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
      ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
      ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();

    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }

    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    cliDriver = new CliDriver();
    cliDriver.processInitFiles(ss);

    return outf.getAbsolutePath();
  }
Example #9
    /**
     * Separate from constructor, because initialize() may need to be called in a separate thread.
     */
    synchronized void initialize() {
      assertState(QueryState.CREATED);
      this.hiveConf = new HiveConf(Driver.class);

      // Update configuration with user/group info.
      if (query.hadoop_user == null) {
        throw new RuntimeException("User must be specified.");
      }

      // Update scratch dir (to have one per user)
      File scratchDir = new File("/tmp/hive-beeswax-" + query.hadoop_user);
      hiveConf.set(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.getPath());
      // Create the temporary directory if necessary.
      // If mapred.job.tracker is set to local, this is used by MapRedTask.
      if (!scratchDir.isDirectory()) {
        if (scratchDir.exists() || !scratchDir.mkdirs()) {
          LOG.warn("Could not create tmp dir:" + scratchDir);
        }
      }

      driver = new Driver(hiveConf);
      ClassLoader loader = hiveConf.getClassLoader();
      String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS);
      if (StringUtils.isNotBlank(auxJars)) {
        try {
          loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        } catch (Exception e) {
          LOG.error("Failed to add jars to class loader: " + auxJars, e);
        }
      }
      hiveConf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
      SessionState.start(hiveConf); // this is thread-local
      this.sessionState = SessionState.get();

      // If this work has a LogContext, associate the children output to the logContext
      OutputStream lcOutStream = null;
      if (this.logContext != null) lcOutStream = this.logContext.getOutputStream();

      // A copy of everything goes to the LogContext.
      // In addition, stderr goes to errStream for error reporting.
      // Note that child output is explicitly tee'd to System.{out,err},
      // otherwise it'll be swallowed by outStream.
      this.sessionState.out = new PrintStream(new TeeOutputStream(lcOutStream, this.outStream));
      this.sessionState.err = new PrintStream(new TeeOutputStream(lcOutStream, this.errStream));
      this.sessionState.childOut =
          new PrintStream(new TeeOutputStream(System.out, sessionState.out));
      this.sessionState.childErr =
          new PrintStream(new TeeOutputStream(System.err, sessionState.err));

      this.state = QueryState.INITIALIZED;
    }
Example #10
  @BeforeClass
  public void setUp() throws Exception {
    // Set-up hive session
    HiveConf conf = new HiveConf();
    driver = new Driver(conf);
    ss = new SessionState(conf, System.getProperty("user.name"));
    ss = SessionState.start(ss);
    SessionState.setCurrentSessionState(ss);

    Configuration configuration = ApplicationProperties.get();
    dgiCLient =
        new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));
  }
Example #11
  @Override
  protected void setUp() throws Exception {

    super.setUp();
    System.setProperty("hive.metastore.init.hooks", DummyMetaStoreInitListener.class.getName());
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf);
    driver = new Driver(hiveConf);
  }
Example #12
  @Before
  public void setup() throws Exception {
    conf = new HiveConf();
    conf.setVar(
        ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
        SentryHiveAuthorizationTaskFactoryImpl.class.getName());
    db = Mockito.mock(Hive.class);
    table = new Table(DB, TABLE);
    partition = new Partition(table);
    context = new Context(conf);
    parseDriver = new ParseDriver();
    analyzer = new DDLSemanticAnalyzer(conf, db);
    SessionState.start(conf);
    Mockito.when(db.getTable(TABLE, false)).thenReturn(table);
    Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
        .thenReturn(partition);

    HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
    auth.setConf(conf);
    currentUser = auth.getUserName();
  }
Example #13
  @Override
  protected void setUp() throws Exception {

    super.setUp();
    System.setProperty(ConfVars.METASTORE_EVENT_LISTENERS.varname, DummyListener.class.getName());
    System.setProperty(
        ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, DummyPreListener.class.getName());
    Thread t = new Thread(new RunMS());
    t.start();
    Thread.sleep(40000);
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setBoolVar(ConfVars.METASTORE_MODE, false);
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf, null);
    driver = new Driver(hiveConf);
  }
Example #14
  @BeforeClass
  public static void before() throws Exception {
    int port = MetaStoreUtils.findFreePort();

    hiveConf = new HiveConf(TestMetaStoreMetrics.class);
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

    MetricsFactory.close();
    MetricsFactory.init(hiveConf);
    metrics = (CodahaleMetrics) MetricsFactory.getInstance();

    // Increments one HMS connection
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge(), hiveConf);

    // Increments one HMS connection (Hive.get())
    SessionState.start(new CliSessionState(hiveConf));
    driver = new Driver(hiveConf);
  }
Example #15
 @BeforeTest
 @Override
 public void beforeTest() throws Exception {
   super.beforeTest();
   List<FieldSchema> factColumns = new ArrayList<>();
   factColumns.add(new FieldSchema("continent", "int", ""));
   factColumns.add(new FieldSchema("country", "int", ""));
   factColumns.add(new FieldSchema("region", "int", ""));
   factColumns.add(new FieldSchema("city", "int", ""));
   factColumns.add(new FieldSchema("count", "double", ""));
   factColumns.add(new FieldSchema("added", "double", ""));
   try {
     HiveConf hiveConf = new HiveConf();
     SessionState.start(hiveConf);
     Hive.get().dropTable("default.wikipedia");
     createHiveTable("default", "wikipedia", factColumns);
     Table tbl = CubeMetastoreClient.getInstance(hiveConf).getHiveTable("wikipedia");
     tbl.setProperty("druid.table.time.dimension", "time");
   } catch (HiveException e) {
     log.error("Exception while creating hive table", e);
   }
 }
Example #16
 /** todo: what should this do on failure? Should it rethrow? Invalidate stats? */
 void gatherStats() throws IOException {
   if (!ci.isMajorCompaction()) {
     return;
   }
   if (columnList.isEmpty()) {
     LOG.debug(
         "No existing stats for "
             + ci.dbname
             + "."
             + ci.tableName
             + " found.  Will not run analyze.");
     return; // nothing to do
   }
    // e.g. analyze table page_view partition(dt='10/15/2014',country='US')
   // compute statistics for columns viewtime
   StringBuilder sb =
       new StringBuilder("analyze table ").append(ci.dbname).append(".").append(ci.tableName);
   if (ci.partName != null) {
     try {
       sb.append(" partition(");
       Map<String, String> partitionColumnValues = Warehouse.makeEscSpecFromName(ci.partName);
        String separator = "";
        for (Map.Entry<String, String> ent : partitionColumnValues.entrySet()) {
          // Comma-separate the pairs so multi-column partition specs stay valid.
          sb.append(separator).append(ent.getKey()).append("='").append(ent.getValue()).append("'");
          separator = ",";
        }
       sb.append(")");
     } catch (MetaException ex) {
       throw new IOException(ex);
     }
   }
   sb.append(" compute statistics for columns ");
   for (String colName : columnList) {
     sb.append(colName).append(",");
   }
   sb.setLength(sb.length() - 1); // remove trailing ,
   LOG.info("running '" + sb.toString() + "'");
   Driver d = new Driver(conf, userName);
   SessionState localSession = null;
   if (SessionState.get() == null) {
     localSession = SessionState.start(new SessionState(conf));
   }
   try {
     CommandProcessorResponse cpr = d.run(sb.toString());
     if (cpr.getResponseCode() != 0) {
       throw new IOException(
           "Could not update stats for table "
               + ci.getFullTableName()
               + (ci.partName == null ? "" : "/" + ci.partName)
               + " due to: "
               + cpr);
     }
   } catch (CommandNeedRetryException cnre) {
     throw new IOException(
         "Could not update stats for table "
             + ci.getFullTableName()
             + (ci.partName == null ? "" : "/" + ci.partName)
             + " due to: "
             + cnre.getMessage());
   } finally {
     if (localSession != null) {
       localSession.close();
     }
   }
 }
Example #17
  @SuppressWarnings("unchecked")
  @Before
  public void setUp() throws Exception {
    utils = mock(DagUtils.class);
    fs = mock(FileSystem.class);
    path = mock(Path.class);
    when(path.getFileSystem(any(Configuration.class))).thenReturn(fs);
    when(utils.getTezDir(any(Path.class))).thenReturn(path);
    when(utils.createVertex(
            any(JobConf.class),
            any(BaseWork.class),
            any(Path.class),
            any(LocalResource.class),
            any(List.class),
            any(FileSystem.class),
            any(Context.class),
            anyBoolean(),
            any(TezWork.class),
            any(VertexType.class)))
        .thenAnswer(
            new Answer<Vertex>() {

              @Override
              public Vertex answer(InvocationOnMock invocation) throws Throwable {
                Object[] args = invocation.getArguments();
                return Vertex.create(
                    ((BaseWork) args[1]).getName(),
                    mock(ProcessorDescriptor.class),
                    0,
                    mock(Resource.class));
              }
            });

    when(utils.createEdge(
            any(JobConf.class),
            any(Vertex.class),
            any(Vertex.class),
            any(TezEdgeProperty.class),
            any(VertexType.class)))
        .thenAnswer(
            new Answer<Edge>() {

              @Override
              public Edge answer(InvocationOnMock invocation) throws Throwable {
                Object[] args = invocation.getArguments();
                return Edge.create((Vertex) args[1], (Vertex) args[2], mock(EdgeProperty.class));
              }
            });

    work = new TezWork("");

    mws = new MapWork[] {new MapWork(), new MapWork()};
    rws = new ReduceWork[] {new ReduceWork(), new ReduceWork()};

    work.addAll(mws);
    work.addAll(rws);

    int i = 0;
    for (BaseWork w : work.getAllWork()) {
      w.setName("Work " + (++i));
    }

    op = mock(Operator.class);

    LinkedHashMap<String, Operator<? extends OperatorDesc>> map =
        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    map.put("foo", op);
    mws[0].setAliasToWork(map);
    mws[1].setAliasToWork(map);

    LinkedHashMap<String, ArrayList<String>> pathMap =
        new LinkedHashMap<String, ArrayList<String>>();
    ArrayList<String> aliasList = new ArrayList<String>();
    aliasList.add("foo");
    pathMap.put("foo", aliasList);

    mws[0].setPathToAliases(pathMap);
    mws[1].setPathToAliases(pathMap);

    rws[0].setReducer(op);
    rws[1].setReducer(op);

    TezEdgeProperty edgeProp = new TezEdgeProperty(EdgeType.SIMPLE_EDGE);
    work.connect(mws[0], rws[0], edgeProp);
    work.connect(mws[1], rws[0], edgeProp);
    work.connect(rws[0], rws[1], edgeProp);

    task = new TezTask(utils);
    task.setWork(work);
    task.setConsole(mock(LogHelper.class));

    conf = new JobConf();
    appLr = mock(LocalResource.class);

    SessionState.start(new HiveConf());
    session = mock(TezClient.class);
    sessionState = mock(TezSessionState.class);
    when(sessionState.getSession()).thenReturn(session);
    when(session.submitDAG(any(DAG.class)))
        .thenThrow(new SessionNotRunning(""))
        .thenReturn(mock(DAGClient.class));
  }
Example #18
  private void generateTestData() throws Exception {
    HiveConf conf = new HiveConf(SessionState.class);

    conf.set(
        "javax.jdo.option.ConnectionURL",
        String.format("jdbc:derby:;databaseName=%s;create=true", dbDir));
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
    conf.set("hive.metastore.warehouse.dir", whDir);
    conf.set("mapred.job.tracker", "local");
    conf.set(ConfVars.SCRATCHDIR.varname, getTempDir("scratch_dir"));
    conf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
    conf.set(ConfVars.DYNAMICPARTITIONINGMODE.varname, "nonstrict");

    SessionState ss = new SessionState(conf);
    SessionState.start(ss);
    Driver hiveDriver = new Driver(conf);

    // generate (key, value) test data
    String testDataFile = generateTestDataFile();

    // Create a (key, value) schema table with Text SerDe which is available in hive-serdes.jar
    executeQuery(
        hiveDriver,
        "CREATE TABLE IF NOT EXISTS default.kv(key INT, value STRING) "
            + "ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
    executeQuery(
        hiveDriver,
        "LOAD DATA LOCAL INPATH '" + testDataFile + "' OVERWRITE INTO TABLE default.kv");

    // Create a (key, value) schema table in non-default database with RegexSerDe which is available
    // in hive-contrib.jar
    // Table with RegExSerde is expected to have columns of STRING type only.
    executeQuery(hiveDriver, "CREATE DATABASE IF NOT EXISTS db1");
    executeQuery(
        hiveDriver,
        "CREATE TABLE db1.kv_db1(key STRING, value STRING) "
            + "ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe' "
            + "WITH SERDEPROPERTIES ("
            + "  \"input.regex\" = \"([0-9]*), (.*_[0-9]*)\", "
            + "  \"output.format.string\" = \"%1$s, %2$s\""
            + ") ");
    executeQuery(hiveDriver, "INSERT INTO TABLE db1.kv_db1 SELECT * FROM default.kv");

    // Create an Avro format based table backed by schema in a separate file
    final String avroCreateQuery =
        String.format(
            "CREATE TABLE db1.avro "
                + "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' "
                + "STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' "
                + "OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' "
                + "TBLPROPERTIES ('avro.schema.url'='file:///%s')",
            BaseTestQuery.getPhysicalFileFromResource("avro_test_schema.json").replace('\\', '/'));

    executeQuery(hiveDriver, avroCreateQuery);
    executeQuery(hiveDriver, "INSERT INTO TABLE db1.avro SELECT * FROM default.kv");

    executeQuery(hiveDriver, "USE default");

    // create a table with no data
    executeQuery(hiveDriver, "CREATE TABLE IF NOT EXISTS empty_table(a INT, b STRING)");
    // delete the table location of empty table
    File emptyTableLocation = new File(whDir, "empty_table");
    if (emptyTableLocation.exists()) {
      FileUtils.forceDelete(emptyTableLocation);
    }

    // create a Hive table that has columns with data types which are supported for reading in
    // Drill.
    testDataFile = generateAllTypesDataFile();
    executeQuery(
        hiveDriver,
        "CREATE TABLE IF NOT EXISTS readtest ("
            + "  binary_field BINARY,"
            + "  boolean_field BOOLEAN,"
            + "  tinyint_field TINYINT,"
            + "  decimal0_field DECIMAL,"
            + "  decimal9_field DECIMAL(6, 2),"
            + "  decimal18_field DECIMAL(15, 5),"
            + "  decimal28_field DECIMAL(23, 1),"
            + "  decimal38_field DECIMAL(30, 3),"
            + "  double_field DOUBLE,"
            + "  float_field FLOAT,"
            + "  int_field INT,"
            + "  bigint_field BIGINT,"
            + "  smallint_field SMALLINT,"
            + "  string_field STRING,"
            + "  varchar_field VARCHAR(50),"
            + "  timestamp_field TIMESTAMP,"
            + "  date_field DATE"
            + ") PARTITIONED BY ("
            + "  binary_part BINARY,"
            + "  boolean_part BOOLEAN,"
            + "  tinyint_part TINYINT,"
            + "  decimal0_part DECIMAL,"
            + "  decimal9_part DECIMAL(6, 2),"
            + "  decimal18_part DECIMAL(15, 5),"
            + "  decimal28_part DECIMAL(23, 1),"
            + "  decimal38_part DECIMAL(30, 3),"
            + "  double_part DOUBLE,"
            + "  float_part FLOAT,"
            + "  int_part INT,"
            + "  bigint_part BIGINT,"
            + "  smallint_part SMALLINT,"
            + "  string_part STRING,"
            + "  varchar_part VARCHAR(50),"
            + "  timestamp_part TIMESTAMP,"
            + "  date_part DATE"
            + ") ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' "
            + "TBLPROPERTIES ('serialization.null.format'='') ");

    // Add a partition to table 'readtest'
    executeQuery(
        hiveDriver,
        "ALTER TABLE readtest ADD IF NOT EXISTS PARTITION ( "
            + "  binary_part='binary', "
            + "  boolean_part='true', "
            + "  tinyint_part='64', "
            + "  decimal0_part='36.9', "
            + "  decimal9_part='36.9', "
            + "  decimal18_part='3289379872.945645', "
            + "  decimal28_part='39579334534534.35345', "
            + "  decimal38_part='363945093845093890.9', "
            + "  double_part='8.345', "
            + "  float_part='4.67', "
            + "  int_part='123456', "
            + "  bigint_part='234235', "
            + "  smallint_part='3455', "
            + "  string_part='string', "
            + "  varchar_part='varchar', "
            + "  timestamp_part='2013-07-05 17:01:00', "
            + "  date_part='2013-07-05')");

    // Add a second partition to table 'readtest' which contains the same values as the
    // first partition except for the boolean_part partition column
    executeQuery(
        hiveDriver,
        "ALTER TABLE readtest ADD IF NOT EXISTS PARTITION ( "
            + "  binary_part='binary', "
            + "  boolean_part='false', "
            + "  tinyint_part='64', "
            + "  decimal0_part='36.9', "
            + "  decimal9_part='36.9', "
            + "  decimal18_part='3289379872.945645', "
            + "  decimal28_part='39579334534534.35345', "
            + "  decimal38_part='363945093845093890.9', "
            + "  double_part='8.345', "
            + "  float_part='4.67', "
            + "  int_part='123456', "
            + "  bigint_part='234235', "
            + "  smallint_part='3455', "
            + "  string_part='string', "
            + "  varchar_part='varchar', "
            + "  timestamp_part='2013-07-05 17:01:00', "
            + "  date_part='2013-07-05')");

    // Load data into table 'readtest'
    executeQuery(
        hiveDriver,
        String.format(
            "LOAD DATA LOCAL INPATH '%s' OVERWRITE INTO TABLE default.readtest PARTITION ("
                + "  binary_part='binary', "
                + "  boolean_part='true', "
                + "  tinyint_part='64', "
                + "  decimal0_part='36.9', "
                + "  decimal9_part='36.9', "
                + "  decimal18_part='3289379872.945645', "
                + "  decimal28_part='39579334534534.35345', "
                + "  decimal38_part='363945093845093890.9', "
                + "  double_part='8.345', "
                + "  float_part='4.67', "
                + "  int_part='123456', "
                + "  bigint_part='234235', "
                + "  smallint_part='3455', "
                + "  string_part='string', "
                + "  varchar_part='varchar', "
                + "  timestamp_part='2013-07-05 17:01:00', "
                + "  date_part='2013-07-05')",
            testDataFile));

    // create a table that has all Hive types. This is to test how Hive table metadata
    // is populated in Drill's INFORMATION_SCHEMA.
    executeQuery(
        hiveDriver,
        "CREATE TABLE IF NOT EXISTS infoschematest("
            + "booleanType BOOLEAN, "
            + "tinyintType TINYINT, "
            + "smallintType SMALLINT, "
            + "intType INT, "
            + "bigintType BIGINT, "
            + "floatType FLOAT, "
            + "doubleType DOUBLE, "
            + "dateType DATE, "
            + "timestampType TIMESTAMP, "
            + "binaryType BINARY, "
            + "decimalType DECIMAL(38, 2), "
            + "stringType STRING, "
            + "varCharType VARCHAR(20), "
            + "listType ARRAY<STRING>, "
            + "mapType MAP<STRING,INT>, "
            + "structType STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>, "
            + "uniontypeType UNIONTYPE<int, double, array<string>>)");

    // create a Hive view to test how its metadata is populated in Drill's INFORMATION_SCHEMA
    executeQuery(hiveDriver, "CREATE VIEW IF NOT EXISTS hiveview AS SELECT * FROM kv");

    executeQuery(
        hiveDriver,
        "CREATE TABLE IF NOT EXISTS "
            + "partition_pruning_test_loadtable(a DATE, b TIMESTAMP, c INT, d INT, e INT) "
            + "ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
    executeQuery(
        hiveDriver,
        String.format(
            "LOAD DATA LOCAL INPATH '%s' INTO TABLE partition_pruning_test_loadtable",
            generateTestDataFileForPartitionInput()));

    // create partitioned hive table to test partition pruning
    executeQuery(
        hiveDriver,
        "CREATE TABLE IF NOT EXISTS partition_pruning_test(a DATE, b TIMESTAMP) "
            + "partitioned by (c INT, d INT, e INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
    executeQuery(
        hiveDriver,
        "INSERT OVERWRITE TABLE partition_pruning_test PARTITION(c, d, e) "
            + "SELECT a, b, c, d, e FROM partition_pruning_test_loadtable");

    // Add a partition with custom location
    executeQuery(
        hiveDriver,
        String.format(
            "ALTER TABLE partition_pruning_test ADD PARTITION (c=99, d=98, e=97) LOCATION '%s'",
            getTempDir("part1")));
    executeQuery(
        hiveDriver,
        String.format(
            "INSERT INTO TABLE partition_pruning_test PARTITION(c=99, d=98, e=97) "
                + "SELECT '%s', '%s' FROM kv LIMIT 1",
            new Date(System.currentTimeMillis()).toString(),
            new Timestamp(System.currentTimeMillis()).toString()));

    executeQuery(hiveDriver, "DROP TABLE partition_pruning_test_loadtable");

    ss.close();
  }
Example #19
  public OpetatorTreeDemo() {
    super(new GridLayout(1, 0));

    DefaultMutableTreeNode top = null;

    try {
      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
      System.out.println(classLoader.getResource("hive-default.xml"));
      System.out.println(classLoader.getResource("hive-site.xml"));
      HiveConf hiveConf = new HiveConf(SessionState.class);
      SessionState.start(new SessionState(hiveConf));
      Context ctx = new Context(hiveConf);
      ctx.setTryCount(10);
      ctx.setCmd(CommondDemo.command3);
      ctx.setHDFSCleanup(true);
      ParseDriver pd = new ParseDriver();
      ASTNode astTree = pd.parse(CommondDemo.command3, ctx);
      astTree = ParseUtils.findRootNonNullToken(astTree);
      SemanticAnalyzer sem = (SemanticAnalyzer) SemanticAnalyzerFactory.get(hiveConf, astTree);
      sem.analyze(astTree, ctx);
      sem.validate();
      List<Operator<? extends OperatorDesc>> topOpList =
          new ArrayList<Operator<? extends OperatorDesc>>(sem.topOps.values());
      if (topOpList.size() == 1) {
        top = createNodes(topOpList.get(0));
      } else {
        top = new DefaultMutableTreeNode("root");
        for (Operator<? extends OperatorDesc> op : topOpList) {
          top.add(createNodes(op));
        }
      }
    } catch (Exception e1) {
      // TODO Auto-generated catch block
      e1.printStackTrace();
    }

    // Create a tree that allows one selection at a time.
    tree = new JTree(top);
    tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);

    // Listen for when the selection changes.
    tree.addTreeSelectionListener(this);

    if (playWithLineStyle) {
      System.out.println("line style = " + lineStyle);
      tree.putClientProperty("JTree.lineStyle", lineStyle);
    }

    // Create the scroll pane and add the tree to it.
    JScrollPane treeView = new JScrollPane(tree);

    // Create the HTML viewing pane.
    htmlPane = new JEditorPane();
    htmlPane.setEditable(false);
    initHelp();
    JScrollPane htmlView = new JScrollPane(htmlPane);

    // Add the scroll panes to a split pane.
    JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
    splitPane.setTopComponent(treeView);
    splitPane.setBottomComponent(htmlView);

    Dimension minimumSize = new Dimension(100, 50);
    htmlView.setMinimumSize(minimumSize);
    treeView.setMinimumSize(minimumSize);
    splitPane.setDividerLocation(100);
    splitPane.setPreferredSize(new Dimension(500, 300));

    // Add the split pane to this panel.
    add(splitPane);
  }
Example #20
  public static void main(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
      System.exit(1);
    }

    SessionState.initHiveLog4j();

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
      ss.out = new PrintStream(System.out, true, "UTF-8");
      ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(3);
    }

    if (!oproc.process_stage2(ss)) {
      System.exit(2);
    }

    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
      conf.set((String) item.getKey(), (String) item.getValue());
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
      ClassLoader loader = conf.getClassLoader();
      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      conf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
    }

    SessionState.start(ss);

    CliDriver cli = new CliDriver();

    Hive hive = Hive.get();

    String username = ss.getUserName();
    String passwd = ss.passwd;
    if (!hive.isAUser(username, passwd)) {
      System.out.println("User or passwd is wrong!");
      System.exit(0);
    } else {
      System.out.println("Connect to TDW successfully!");
    }

    if (ss.getDbName() == null) {
      ss.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    }
    if (ss.execString != null) {
      System.exit(cli.processLine(ss.execString));
    }

    try {
      if (ss.fileName != null) {
        System.exit(cli.processReader(new BufferedReader(new FileReader(ss.fileName))));
      }
    } catch (FileNotFoundException e) {
      System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
      System.exit(3);
    }

    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
    completors.add(
        new SimpleCompletor(
            new String[] {"set", "from", "create", "load", "describe", "quit", "exit"}));
    reader.addCompletor(new ArgumentCompletor(completors));

    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));
    int ret = 0;

    String prefix = "";
    String curPrompt = prompt;
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
      if (!prefix.equals("")) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        line = prefix + line;
        ret = cli.processLine(line);
        prefix = "";
        curPrompt = prompt;
      } else {
        prefix = prefix + line;
        curPrompt = prompt2;
        continue;
      }
    }

    System.exit(ret);
  }
Example #21
 public void bringUp() {
   SessionState.start(this.sessionState);
 }
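bringUp() re-attaches a previously created SessionState to the current thread. Below is a minimal companion sketch, assuming SessionState.detachSession() is available in the Hive version in use, that releases the thread-local reference without closing the session.

 public void takeDown() {
   // Drop the thread-local association set up by SessionState.start(); the
   // session itself stays open and can be re-attached later via bringUp().
   SessionState.detachSession();
 }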
Example #22
  @Override
  protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
      conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream =
        new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // [email protected]: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
      throw new Exception("unable to parse options stage 1");
    }

    if (!oproc.process_stage2(sessionState)) {
      throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
      conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
      System.out.println("Hive aux jars variable not expanded");
    } else {
      System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
      // hadoop-20 and above - we need to augment classpath using hiveconf
      // components
      // see also: code in ExecDriver.java
      ClassLoader loader = conf.getClassLoader();
      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

      System.out.println("Got auxJars = " + auxJars);

      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      conf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
    }

    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
      if (!prefix.isEmpty()) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        line = prefix + line;
        line = injectVariables(line);
        System.out.println("Reportal Hive: Running Hive Query: " + line);
        System.out.println(
            "Reportal Hive: HiveConf HIVEAUXJARS: "
                + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
        returnValue = cli.processLine(line);
        prefix = "";
      } else {
        prefix = prefix + line;
        continue;
      }
    }

    tsvTempOutputStream.close();

    // convert tsv to csv and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
      String tsvLine = rowScanner.nextLine();
      // strip all quotes, and then quote the columns
      csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Delete the temp file now that its contents have been converted
    tempTSVFile.delete();

    if (returnValue != 0) {
      throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
  }
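A small standalone illustration (not part of the original job) of what the quote-stripping TSV-to-CSV conversion above does to a sample row:

public class TsvToCsvLineDemo {
  public static void main(String[] args) {
    // Sample row: tab-separated, with embedded double quotes in one column.
    String tsvLine = "1\tfoo \"bar\"\tbaz";
    // Same transformation as in runReportal(): strip all quotes, then quote each column.
    String csvLine = "\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"";
    System.out.println(csvLine); // prints: "1","foo bar","baz"
  }
}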