Example 1
  @Test
  public void testConnections() throws Exception {

    // initial state is one connection
    String json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.OPEN_CONNECTIONS, 1);

    // create two connections
    HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
    HiveMetaStoreClient msc2 = new HiveMetaStoreClient(hiveConf);

    json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.OPEN_CONNECTIONS, 3);

    // close one connection, verify two remain open
    msc.close();
    json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.OPEN_CONNECTIONS, 2);

    // close the second connection, verify one remains open
    msc2.close();
    json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.OPEN_CONNECTIONS, 1);
  }
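
The examples rely on MetricsTestUtils.verifyMetricsJson from Hive's test utilities. As a rough sketch of what such a check amounts to (an assumption for illustration, not Hive's actual implementation), the Codahale JSON dump can be parsed with Jackson and the named metric's count or value asserted:

// Hypothetical verifyMetricsJson-style helper (the real MetricsTestUtils may differ).
// Assumes the Codahale JSON layout:
// {"counters":{"<name>":{"count":N}}, "timers":{"<name>":{"count":N}},
//  "gauges":{"<name>":{"value":N}}}
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;

public class MetricsJsonCheck {
  public static final String COUNTER = "counters";
  public static final String TIMER = "timers";
  public static final String GAUGE = "gauges";

  public static void verifyMetricsJson(String json, String category, String name,
      int expected) throws Exception {
    // Navigate to the metric node under its category.
    JsonNode metric = new ObjectMapper().readTree(json).path(category).path(name);
    // Counters and timers report a "count"; gauges report a "value".
    String field = GAUGE.equals(category) ? "value" : "count";
    Assert.assertEquals(name, expected, metric.path(field).asInt());
  }
}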
Example 2
  @Test
  public void testMethodCounts() throws Exception {
    driver.run("show databases");
    String json = metrics.dumpJson();

    // one call made during initialization, one made by the query above
    MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_get_all_databases", 2);
  }
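
These tests assume a fixture that provides the metrics, driver, and hiveConf fields. A minimal setup sketch, assuming an embedded metastore and the usual Hive/JUnit imports (the method name below is illustrative; the real test class does more wiring):

  // Illustrative fixture sketch: enable metastore metrics so CodahaleMetrics is used,
  // then create the Driver that issues the HiveQL statements in these examples.
  private static HiveConf hiveConf;
  private static Driver driver;
  private static CodahaleMetrics metrics;

  @BeforeClass
  public static void before() throws Exception {
    hiveConf = new HiveConf();
    hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, true);
    MetricsFactory.init(hiveConf);
    metrics = (CodahaleMetrics) MetricsFactory.getInstance();

    SessionState.start(new CliSessionState(hiveConf));
    driver = new Driver(hiveConf);
  }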
Example 3
  @Test
  public void testMetaDataCounts() throws Exception {
    // 1 database created
    driver.run("create database testdb1");

    // 4 tables
    driver.run("create table testtbl1 (key string)");
    driver.run("create table testtblpart (key string) partitioned by (partkey string)");
    driver.run("use testdb1");
    driver.run("create table testtbl2 (key string)");
    driver.run("create table testtblpart2 (key string) partitioned by (partkey string)");

    // 6 partitions
    driver.run("alter table default.testtblpart add partition (partkey='a')");
    driver.run("alter table default.testtblpart add partition (partkey='b')");
    driver.run("alter table default.testtblpart add partition (partkey='c')");
    driver.run("alter table testdb1.testtblpart2 add partition (partkey='a')");
    driver.run("alter table testdb1.testtblpart2 add partition (partkey='b')");
    driver.run("alter table testdb1.testtblpart2 add partition (partkey='c')");

    // create and drop some additional metadata, to test drop counts.
    driver.run("create database tempdb");
    driver.run("use tempdb");

    driver.run("create table delete_by_table (key string) partitioned by (partkey string)");
    driver.run("alter table delete_by_table add partition (partkey='temp')");
    driver.run("drop table delete_by_table");

    driver.run("create table delete_by_part (key string) partitioned by (partkey string)");
    driver.run("alter table delete_by_part add partition (partkey='temp')");
    driver.run("alter table delete_by_part drop partition (partkey='temp')");

    driver.run("create table delete_by_db (key string) partitioned by (partkey string)");
    driver.run("alter table delete_by_db add partition (partkey='temp')");
    driver.run("use default");
    driver.run("drop database tempdb cascade");

    // verify the create/delete counters from the current metrics dump:
    // creates - 2 databases (testdb1, tempdb), 7 tables, 9 partitions;
    // deletes - 1 database, 3 tables, 3 partitions (from the tempdb cleanup above)
    CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
    String json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_DATABASES, 2);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_TABLES, 7);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.CREATE_TOTAL_PARTITIONS, 9);

    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_DATABASES, 1);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_TABLES, 3);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.COUNTER, MetricsConstant.DELETE_TOTAL_PARTITIONS, 3);

    // test the initial metadata count metrics reported by a fresh HMSHandler
    hiveConf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, ObjectStore.class.getName());
    HiveMetaStore.HMSHandler baseHandler = new HiveMetaStore.HMSHandler("test", hiveConf, false);
    baseHandler.init();
    baseHandler.updateMetrics();

    // remaining metadata after the drops: 2 databases (default + testdb1), 4 tables, 6 partitions
    json = metrics.dumpJson();
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_DATABASES, 2);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_TABLES, 4);
    MetricsTestUtils.verifyMetricsJson(
        json, MetricsTestUtils.GAUGE, MetricsConstant.INIT_TOTAL_PARTITIONS, 6);
  }
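
Because this example leaves testdb1 and the tables in the default database behind, a run against a shared metastore may want a teardown. A minimal sketch under the same assumptions as the fixture above (the method name is illustrative):

  // Illustrative cleanup so repeated runs start from a clean metastore state.
  @AfterClass
  public static void cleanUp() throws Exception {
    driver.run("drop database if exists testdb1 cascade");
    driver.run("drop table if exists default.testtbl1");
    driver.run("drop table if exists default.testtblpart");
  }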