Example 1
 public static void addJsonList(String json, String user) throws Exception {
   JSONObject jsonObj = new JSONObject(json);
   JSONArray list = jsonObj.getJSONObject("data").getJSONArray("users");
   for (int i = 0; i < list.length(); i++) {
     JSONObject obj = list.getJSONObject(i);
     addjson(obj.toString(), user);
   }
 }
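A minimal usage sketch for addJsonList above (assumptions: the method lives in a class named UserJsonService, which is not shown in this excerpt, and org.json is on the classpath). It builds the JSON shape the method reads, an object whose data.users member is an array of user records, and passes it in together with the operator name.

  import org.json.JSONArray;
  import org.json.JSONObject;

  public class AddJsonListDemo {
    public static void main(String[] args) throws Exception {
      // One user record carrying the fields that addjson(...) later extracts.
      JSONObject user = new JSONObject()
          .put("userid", "u001")
          .put("email", "u001@example.com")
          .put("cname", "Demo User")
          .put("role", 1)
          .put("permission", new JSONArray().put("read"));

      // addJsonList expects {"data": {"users": [ ... ]}}.
      JSONObject payload = new JSONObject()
          .put("data", new JSONObject().put("users", new JSONArray().put(user)));

      // UserJsonService is a hypothetical name for the enclosing class;
      // each element of data.users is forwarded to addjson(...) in turn.
      UserJsonService.addJsonList(payload.toString(), "admin");
    }
  }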
Example 2
  public static String DeleteDownload(String uuid, String callback)
      throws SQLException, JSONException {
    JSONObject jsonObj = new JSONObject();

    MySqlConn m_fpsql = getConn();

    Connection conn = m_fpsql.getConn();
    String strSql = "update adhoc_download set status='DEL',endtime=? where uuid=? ";
    PreparedStatement m_fps = conn.prepareStatement(strSql);
    try {
      int index = 1;
      m_fps.setTimestamp(index++, new java.sql.Timestamp(System.currentTimeMillis()));
      m_fps.setString(index++, uuid);

      m_fps.executeUpdate();
      String fullstrSql = m_fps.toString();
      jsonObj.put("code", "1");
      jsonObj.put("__debug", fullstrSql);
    } catch (Exception e) {
      jsonObj.put("code", "0");
      TableJoin.LOG.error("updatePercent" + m_fps.toString(), e);
      jsonObj.put("__debugerror", m_fps.toString());
    } finally {
      m_fps.close();
      m_fpsql.close();
    }
    if (callback != null && callback.length() > 0) {
      return callback + "(" + jsonObj.toString() + ")";
    } else {
      return jsonObj.toString();
    }
  }
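The return at the end of DeleteDownload is the JSONP convention repeated by getUserTables and addTxt below: when a callback name is supplied, the JSON body is wrapped as callback(json) so a browser can consume the response from a script tag. A standalone sketch of just that wrapping step (names here are illustrative only):

  import org.json.JSONObject;

  public class JsonpWrap {
    // Wraps the JSON body in the callback when one is given, otherwise returns plain JSON.
    static String wrap(String callback, JSONObject body) {
      if (callback != null && callback.length() > 0) {
        return callback + "(" + body.toString() + ")";
      }
      return body.toString();
    }

    public static void main(String[] args) throws Exception {
      JSONObject body = new JSONObject().put("code", "1");
      System.out.println(wrap("handleResult", body)); // handleResult({"code":"1"})
      System.out.println(wrap(null, body));           // {"code":"1"}
    }
  }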
Example 3
 public static void addjson(String json, String user) throws Exception {
   JSONObject jsonObj = new JSONObject(json);
   String userid = jsonObj.getString("userid");
   HashMap<String, String> val = new HashMap<String, String>();
   val.put("userid", jsonObj.getString("userid"));
   val.put("email", jsonObj.getString("email"));
   val.put("cname", jsonObj.getString("cname"));
   val.put("role", String.valueOf(jsonObj.getInt("role")));
   val.put("permission", jsonObj.getJSONArray("permission").toString());
   add(userid, val, user);
 }
Example 4
  public static String create(String userid, Map<String, String> val, String user)
      throws Exception {
    MySqlConn m_fpsql = getConn();
    val.put("opuser", String.valueOf(user));
    val.put("optime", getNowTime());
    JSONObject jsonObj = new JSONObject();

    StringBuffer sqlbuffer = new StringBuffer();
    StringBuffer sqlbuffer2 = new StringBuffer();

    String joinchar = "";
    String[] indexval = new String[val.size()];
    int index = 0;
    for (Entry<String, String> e : val.entrySet()) {
      sqlbuffer.append(joinchar).append(e.getKey());
      sqlbuffer2.append(joinchar).append("?");

      joinchar = ",";
      indexval[index] = e.getValue();
      index++;
    }

    Connection conn = m_fpsql.getConn();
    String strSql =
        "insert into users_json " + "(" + sqlbuffer + ")" + "values" + "(" + sqlbuffer2 + ")";
    PreparedStatement m_fps = conn.prepareStatement(strSql);
    try {
      for (int i = 0; i < indexval.length; i++) {
        m_fps.setString(i + 1, indexval[i]);
      }
      m_fps.executeUpdate();
      jsonObj.put("code", "1");

      jsonObj.put("____debug", m_fps.toString());
    } catch (Exception e) {
      jsonObj.put("____debug2", m_fps.toString());
      jsonObj.put("____debugerror", e.toString());
      jsonObj.put("code", "0");
    } finally {
      m_fps.close();
      m_fpsql.close();
    }
    return jsonObj.toString();
  }
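A hypothetical caller of create(...) above (UserJsonService is again an assumed name for the enclosing class). The column list and the '?' placeholders are emitted by the same loop over the map, so they always stay aligned; a LinkedHashMap simply makes the generated column order predictable.

  import java.util.LinkedHashMap;
  import java.util.Map;

  public class CreateUserDemo {
    public static void main(String[] args) throws Exception {
      Map<String, String> val = new LinkedHashMap<String, String>();
      val.put("userid", "u001");
      val.put("email", "u001@example.com");
      val.put("cname", "Demo User");
      val.put("role", "1");
      val.put("permission", "[\"read\"]");

      // create(...) also appends opuser and optime, so the generated statement is roughly:
      // insert into users_json (userid,email,cname,role,permission,opuser,optime) values (?,?,?,?,?,?,?)
      String result = UserJsonService.create("u001", val, "admin");
      System.out.println(result);
    }
  }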
Example 5
  public static void readJoinResult(String uuid, OutputStreamWriter outStream)
      throws SQLException, IOException, JSONException {
    final HashMap<String, String> tableInfo = getTableInfo(uuid);
    JSONObject jsonObj = new JSONObject();

    if (tableInfo == null || tableInfo.isEmpty()) {
      jsonObj.put("code", "0");
      jsonObj.put("message", "该表不存在");
      outStream.append(jsonObj.toString());
      return;
    }
    if (tableInfo.get("status").equals("INDEXING")) {
      jsonObj.put("code", "0");
      jsonObj.put("message", "正在创建索引中请稍后");
      outStream.append(jsonObj.toString());
      return;
    }
    Map stormconf = Utils.readStormConfig();

    Configuration conf = getConf(stormconf);
    com.alimama.mdrill.ui.service.AdhocOfflineService.readHiveResult(
        tableInfo.get("txtStorePath"), outStream, conf);
  }
Example 6
  public static String del(String userid, String user) throws Exception {
    JSONObject jsonObj = new JSONObject();
    jsonObj.put("code", "0");
    MySqlConn m_fpsql = getConn();

    Connection conn = m_fpsql.getConn();
    String strSql = "delete from users_json  where userid=? ";
    PreparedStatement m_fps = conn.prepareStatement(strSql);
    try {
      int index = 1;
      m_fps.setString(index++, userid);
      m_fps.executeUpdate();
      jsonObj.put("code", "1");
    } catch (Exception e) {
      jsonObj.put("message", e.toString());
      jsonObj.put("code", "0");
    } finally {
      m_fps.close();
      m_fpsql.close();
    }

    return jsonObj.toString();
  }
Example 7
  public static String offline(
      String projectName,
      String jsoncallback,
      String queryStr,
      String fl,
      String groupby,
      String mailto,
      String username,
      String jobname,
      String params,
      String leftjoin,
      String fq2,
      String limit2,
      String orderby2,
      String desc2)
      throws Exception {
    int limit = 0;
    if (limit2 != null) {
      limit = Integer.parseInt(limit2);
    }
    long t1 = System.currentTimeMillis();

    boolean isPartionByPt = false;
    if (projectName.equals("rpt_hitfake_auctionall_d")) {
      projectName = "rpt_p4padhoc_auction";
    }
    String hpart = "dt";
    if (projectName.equals("rpt_b2bad_hoc_memb_sum_d")) {
      isPartionByPt = true;
      hpart = "pt";
    }
    if (projectName.equals("r_rpt_tanx_adzone_total")) {
      hpart = "ds";
    }

    queryStr = WebServiceParams.query(queryStr);

    //		String sqlSort = WebServiceParams.sortHive(sort, order);

    TablePartion part = GetPartions.partion(projectName);
    String[] cores = GetShards.get(part.name, false, 10000);
    String[] ms = GetShards.get(part.name, true, 10000);
    String[] partionsAll = GetPartions.get(queryStr, part.parttype);
    GetPartions.Shards shard = GetPartions.getshard(part, partionsAll, cores, ms, 10000, 0);

    HashMap<String, String> filetypeMap = MdrillService.readFieldsFromSchemaXml(part.name);
    ArrayList<String> fqList =
        WebServiceParams.fqListHive(
            hpart, queryStr, shard, isPartionByPt, filetypeMap, null, null, null);
    StringBuffer sqlWhere = new StringBuffer();
    String join = " where ";
    for (String fq : fqList) {
      sqlWhere.append(join);
      sqlWhere.append(fq);
      join = " and ";
    }

    ArrayList<String> groupFields = WebServiceParams.groupFields(groupby);
    StringBuffer sqlGroup = new StringBuffer();
    String daycols = "";
    join = " group by ";
    for (String field : groupFields) {
      sqlGroup.append(join);
      sqlGroup.append(field);
      if (field.equals("thedate")) {
        daycols = "日期,";
      }
      join = ",";
    }

    HashMap<String, String> colMap = new HashMap<String, String>();
    ArrayList<String> showFields = WebServiceParams.showHiveFields(fl);
    StringBuffer cols = new StringBuffer();
    join = "";
    int nameindex = 0;
    for (String field : groupFields) {
      cols.append(join);
      cols.append(field);
      String alias = "tmp_" + nameindex++;
      cols.append(" as " + alias);
      colMap.put(field, alias);
      join = ",";
    }
    for (String field : showFields) {
      if (field.equals("thedate")) {
        daycols = "日期,";
      }
      if (!groupFields.contains(field)) {
        cols.append(join);
        cols.append(field);
        String alias = "tmp_" + nameindex++;
        cols.append(" as " + alias);
        colMap.put(field, alias);
        join = ",";
      }
    }

    HigoAdhocJoinParams[] joins = AdhocWebServiceParams.parseJoinsHive(leftjoin, shard);

    for (int i = 0; i < joins.length; i++) {
      HigoAdhocJoinParams jp = joins[i];
      if (!groupFields.contains(jp.leftkey) && !showFields.contains(jp.leftkey)) {
        cols.append(join);
        cols.append(jp.leftkey);
        String alias = "tmp_" + nameindex++;
        cols.append(" as " + alias);
        colMap.put(jp.leftkey, alias);
        join = ",";
      }
    }

    String hql =
        "select "
            + cols.toString()
            + " from "
            + projectName
            + " "
            + sqlWhere.toString()
            + " "
            + sqlGroup.toString()
            + " ";

    HashMap<String, String> colMap2 = new HashMap<String, String>();

    if (joins.length > 0) {
      StringBuffer buffer = new StringBuffer();
      buffer.append("select ");
      join = "";
      for (String field : groupFields) {
        buffer.append(join);
        buffer.append("jl1.");
        buffer.append(colMap.get(field));
        String alias = "tmp_" + nameindex++;
        buffer.append(" as " + alias);
        colMap2.put(field, alias);
        join = ",";
      }
      for (String field : showFields) {
        if (!groupFields.contains(field)) {
          buffer.append(join);
          buffer.append("jl1.");
          buffer.append(colMap.get(field));
          String alias = "tmp_" + nameindex++;
          buffer.append(" as " + alias);
          colMap2.put(field, alias);
          join = ",";
        }
      }

      for (int i = 0; i < joins.length; i++) {
        HigoAdhocJoinParams jp = joins[i];
        for (String field : jp.fl) {
          buffer.append(join);
          buffer.append("jr" + i + ".");
          buffer.append(field);
          String alias = "tmp_" + nameindex++;
          buffer.append(" as " + alias);
          join = ",";
        }
      }

      buffer.append(" from ");

      buffer.append(" (" + hql + ") jl1 ");

      for (int i = 0; i < joins.length; i++) {
        HigoAdhocJoinParams jp = joins[i];
        buffer.append(
            " join ("
                + jp.frQuer
                + ") jr"
                + i
                + " on jl1."
                + colMap.get(jp.leftkey)
                + "==jr"
                + i
                + "."
                + jp.rightkey
                + " ");
      }

      hql = buffer.toString();
    }

    //		String fq2,int limit

    ArrayList<String> fq2list =
        WebServiceParams.fqListHive(
            hpart, fq2, shard, isPartionByPt, filetypeMap, colMap, colMap2, "fq2");
    if (fq2list.size() > 0) {
      StringBuffer buffer = new StringBuffer();
      buffer.append("select * from ");
      buffer.append("(" + hql + ") fq2");
      String join2 = " where ";
      for (String fq : fq2list) {
        buffer.append(join2);
        buffer.append(fq);
        join2 = " and ";
      }
      hql = buffer.toString();
    }

    if (orderby2 != null) {
      hql = hql + " order by fq2." + colMap2.get(orderby2) + " " + desc2;
    }

    if (limit > 1000000) {
      limit = 1000000;
    }
    if (limit > 0) {
      hql = hql + " limit " + limit;
    }

    String md5 = MD5.getMD5(hql);

    SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd");
    String day = fmt.format(new Date());

    Map stormconf = Utils.readStormConfig();
    String hdpConf = (String) stormconf.get("hadoop.conf.dir");
    String connstr = (String) stormconf.get("higo.download.offline.conn");
    String uname = (String) stormconf.get("higo.download.offline.username");
    String passwd = (String) stormconf.get("higo.download.offline.passwd");
    String store =
        (String) stormconf.get("higo.download.offline.store")
            + "/"
            + day
            + "/"
            + java.util.UUID.randomUUID().toString();
    MySqlConn conn = new MySqlConn(connstr, uname, passwd);
    MysqlInfo info = new MysqlInfo(conn);
    if (username == null || username.length() <= 0) {
      username = "******";
    }

    StringBuffer sqlbuff = new StringBuffer();
    // int jobsize=info.getUserJobname(username, jobname, sqlbuff).size();
    sqlbuff.append(";");
    int size = info.getUser(username, true, sqlbuff).size();
    JSONObject jsonObj = new JSONObject();
    jsonObj.put("sqlbuff", sqlbuff.toString());
    jsonObj.put("size", size);
    // if(jobsize>0)
    // {
    // jsonObj.put("code", "0");
    // jsonObj.put("message", "之前已经有叫"+jobname+"的任务,请换个名字");
    // }else
    //
    if (size < 5) {
      MysqlCallback callback = new MysqlCallback(conn);

      // "维度指标:" means "dimensions/metrics:"; the two replaceAll calls strip that prefix
      // and anything after the full-width stop "。" before splitting the remainder on ",".
      String[] pcols =
          params == null
              ? new String[0]
              : new String(daycols + params.replaceAll("维度指标:", "").replaceAll("。.*$", ""))
                  .split(",");
      StringBuffer liststat = new StringBuffer();
      StringBuffer listgroup = new StringBuffer();
      for (String s : pcols) {
        if (AdhocOfflineService.isStatFields(s)) {
          liststat.append(s);
          liststat.append(",");
        } else {
          listgroup.append(s);
          listgroup.append(",");
        }
      }

      callback.setCols(
          (params == null || params.isEmpty())
              ? cols.toString()
              : listgroup.toString() + liststat.toString());
      OfflineDownload download = new OfflineDownload();
      download.setOffline(callback);
      if (mailto == null || mailto.length() <= 0) {
        mailto = "*****@*****.**";
      }

      StringBuffer setSql = new StringBuffer();
      StringBuffer cleanSql = new StringBuffer();
      for (int i = 0; i < joins.length; i++) {
        HigoAdhocJoinParams jp = joins[i];

        setSql.append(jp.createSql);
        setSql.append(";");
        setSql.append(jp.addData);
        setSql.append(";");

        cleanSql.append(";");
        cleanSql.append(jp.DropSql);
      }

      download.setMailto(mailto);
      download.setHql(
          setSql.toString()
              + " INSERT OVERWRITE DIRECTORY '"
              + store
              + "' "
              + hql
              + "  "
              + cleanSql.toString());

      download.setUseName(username);
      if (jobname == null || jobname.length() <= 0) {
        jobname = day + "_" + md5;
      }
      download.setJobName(jobname);
      download.setDisplayParams((params == null || params.isEmpty()) ? hql : params);
      download.setStoreDir(store);
      download.setConfdir(hdpConf);
      download.setSqlMd5(md5);
      download.run();

      long t2 = System.currentTimeMillis();
      jsonObj.put("code", "1");

      jsonObj.put(
          "message",
          "数据下载中...完成后将会通过<b style=\"color:red\">旺旺</b>和<b style=\"color:red\">邮件</b>通知");
      jsonObj.put("uuid", callback.getUuid());
      jsonObj.put("debug", callback.toString());
      jsonObj.put("timedebug", String.valueOf(t2 - t1));

    } else {
      jsonObj.put("code", "0");
      jsonObj.put("message", "还有" + size + "个任务没有完成数据下载,请稍后提交");
    }

    if (jsoncallback != null && jsoncallback.length() > 0) {
      return jsoncallback + "(" + jsonObj.toString() + ")";
    } else {
      return jsonObj.toString();
    }
  }
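The WHERE and GROUP BY strings inside offline(...) are assembled with a separator that starts as the clause keyword and switches to the list delimiter after the first element, so an empty list yields no clause at all. A compact standalone sketch of that idiom:

  import java.util.Arrays;
  import java.util.Collections;
  import java.util.List;

  public class ClauseJoinDemo {
    // Mirrors the sqlWhere loop above: " where " before the first predicate, " and " afterwards.
    static String whereClause(List<String> predicates) {
      StringBuilder sql = new StringBuilder();
      String join = " where ";
      for (String p : predicates) {
        sql.append(join).append(p);
        join = " and ";
      }
      return sql.toString();
    }

    public static void main(String[] args) {
      System.out.println(whereClause(Arrays.asList("dt='20240101'", "status='OK'")));
      // -> " where dt='20240101' and status='OK'"
      System.out.println("[" + whereClause(Collections.<String>emptyList()) + "]");
      // -> "[]" (no clause at all)
    }
  }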
Example 8
  public static String getJson(String daystart, boolean extamsg)
      throws JSONException, SQLException {
    JSONObject jsonObj = new JSONObject();
    jsonObj.put("code", "1");
    MySqlConn m_fpsql = getConn();

    String strSql =
        "select userid,email,cname,role,permission,'-' as queryday,opuser,optime from users_json order by role desc,userid";
    if (daystart != null && !daystart.isEmpty()) {
      strSql =
          " select userid,email,cname,role,permission,opuser,optime from users_json where (cname not in (select nick from query_analyser.day_user_pv  where queryday>'"
              + daystart
              + "')) order by role desc,userid";
    }

    if (extamsg) {
      strSql =
          " select a.userid as userid,a.email as email ,a.cname as cname,a.role as role,a.permission as permission,a.opuser as opuser,a.optime as optime, b.queryday as queryday "
              + "from ( "
              + ""
              + strSql
              + ") "
              + "  a "
              + " left join (select nick,max(queryday) as queryday from query_analyser.day_user_pv group by nick ) b"
              + " on a.cname=b.nick order by role desc,queryday desc";
    }

    jsonObj.put("_exehql", strSql);

    Connection conn = m_fpsql.getConn();
    Statement stmt = conn.createStatement();
    try {
      ResultSet res = stmt.executeQuery(strSql);
      JSONArray userlist = new JSONArray();
      while (res.next()) {
        JSONObject item = new JSONObject();
        item.put("userid", String.valueOf(res.getString("userid")));
        item.put("email", String.valueOf(res.getString("email")));
        item.put("queryday", String.valueOf(res.getString("queryday")));

        item.put("cname", String.valueOf(res.getString("cname")));

        item.put("role", Integer.parseInt(res.getString("role")));
        item.put("permission", new JSONArray(res.getString("permission")));
        item.put("opuser", String.valueOf(res.getString("opuser")));
        item.put("optime", String.valueOf(res.getString("optime")));

        userlist.put(item);
      }
      m_fpsql.close();
      jsonObj.put("code", "1");
      jsonObj.put("message", "success");
      JSONObject dddd = new JSONObject();
      dddd.put("users", userlist);
      jsonObj.put("data", dddd);
    } catch (Exception e) {
      jsonObj.put("message", e.toString());
      jsonObj.put("code", "0");
    } finally {
      m_fpsql.close();
    }

    return jsonObj.toString();
  }
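The daystart filter in getJson is concatenated straight into the SQL text. A parameterized variant is sketched below (an illustration only, assuming the same users_json / query_analyser.day_user_pv schema and an open java.sql.Connection); binding the date through a placeholder keeps request input out of the SQL string.

  import java.sql.Connection;
  import java.sql.PreparedStatement;
  import java.sql.ResultSet;
  import java.sql.SQLException;

  public class UsersInactiveSinceDemo {
    // Same query shape as the daystart branch above, but with the date bound as a parameter.
    static ResultSet usersInactiveSince(Connection conn, String daystart) throws SQLException {
      String sql =
          "select userid,email,cname,role,permission,opuser,optime from users_json "
              + "where cname not in "
              + "(select nick from query_analyser.day_user_pv where queryday > ?) "
              + "order by role desc, userid";
      PreparedStatement ps = conn.prepareStatement(sql);
      ps.setString(1, daystart);
      return ps.executeQuery(); // caller is responsible for closing the statement/result set
    }
  }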
Example 9
  public static String create(
      String tableShowName, String colsShowName, String splitString, String username, String joins)
      throws Exception {
    JSONObject jsonObj = new JSONObject();

    SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd");
    String day = fmt.format(new Date());
    Map stormconf = Utils.readStormConfig();
    String hdpConf = (String) stormconf.get("hadoop.conf.dir");
    String connstr = (String) stormconf.get("higo.download.offline.conn");
    String uname = (String) stormconf.get("higo.download.offline.username");
    String passwd = (String) stormconf.get("higo.download.offline.passwd");
    String store =
        (String) stormconf.get("higo.download.offline.store")
            + "/"
            + day
            + "/"
            + java.util.UUID.randomUUID().toString();
    MySqlConn m_fpsql = new MySqlConn(connstr, uname, passwd);

    String tableName = java.util.UUID.randomUUID().toString();
    Connection conn = m_fpsql.getConn();
    String strSql =
        "insert into adhoc_joins "
            + "(tableShowName,tableName,colsShowName,colsName,colsType,splitString,txtStorePath,indexStorePath,status,username,createtime,lastuptime,joins,percent)"
            + "values"
            + "(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
    PreparedStatement m_fps = conn.prepareStatement(strSql);
    try {
      int index = 1;
      m_fps.setString(index++, tableShowName);
      m_fps.setString(index++, tableName);
      m_fps.setString(index++, colsShowName);
      StringBuffer colsName = new StringBuffer();
      StringBuffer colsType = new StringBuffer();
      String[] cols = colsShowName.split(",");
      String join = "";
      HashMap<String, String> colsNames = new HashMap<String, String>();
      for (int i = 0; i < cols.length; i++) {
        String colname = "cols_" + i + "_s";
        colsName.append(join);
        colsName.append(colname);
        colsType.append(join);
        colsType.append("string");
        colsNames.put(cols[i], colname);
        join = ",";
      }
      m_fps.setString(index++, colsName.toString());
      m_fps.setString(index++, colsType.toString());
      m_fps.setString(index++, MakeIndex.parseSplit(splitString));
      m_fps.setString(index++, store + "/txt");
      m_fps.setString(index++, store + "/index");
      m_fps.setString(index++, "init");
      m_fps.setString(index++, username);

      long nowtims = System.currentTimeMillis();
      m_fps.setTimestamp(index++, new java.sql.Timestamp(nowtims));
      m_fps.setTimestamp(index++, new java.sql.Timestamp(nowtims));

      StringBuffer joinbuff = new StringBuffer();
      String joinchar = "";
      for (String joindesc : joins.split(",")) {
        String[] arr = joindesc.split(":");
        joinbuff.append(joinchar);
        joinbuff.append(arr[0]);
        joinbuff.append(":");
        joinbuff.append(arr[1]);
        joinbuff.append(":");
        joinbuff.append(colsNames.get(arr[2]));
        joinchar = ",";
      }
      m_fps.setString(index++, joinbuff.toString());
      m_fps.setString(index++, "");

      m_fps.executeUpdate();
      jsonObj.put("code", "1");
      jsonObj.put("tableid", tableName);
      jsonObj.put("____debug", m_fps.toString());
    } catch (Exception e) {
      jsonObj.put("____debug2", m_fps.toString());
      jsonObj.put("____debugerror", e.toString());
      jsonObj.put("code", "0");
    } finally {
      m_fps.close();
      m_fpsql.close();
    }

    return jsonObj.toString();
  }
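The column-mapping step inside this create(...) turns the user-facing column names into generated physical names of the form cols_<i>_s, all typed as string, and the joins description is then rewritten in terms of those generated names. An isolated sketch of the mapping with sample values:

  import java.util.LinkedHashMap;
  import java.util.Map;

  public class ColsMappingDemo {
    public static void main(String[] args) {
      String colsShowName = "name,age,city"; // sample display names
      String[] cols = colsShowName.split(",");

      Map<String, String> colsNames = new LinkedHashMap<String, String>();
      StringBuilder colsName = new StringBuilder();
      StringBuilder colsType = new StringBuilder();
      String join = "";
      for (int i = 0; i < cols.length; i++) {
        String colname = "cols_" + i + "_s"; // generated physical column name
        colsName.append(join).append(colname);
        colsType.append(join).append("string");
        colsNames.put(cols[i], colname);
        join = ",";
      }

      System.out.println(colsName);  // cols_0_s,cols_1_s,cols_2_s
      System.out.println(colsType);  // string,string,string
      System.out.println(colsNames); // {name=cols_0_s, age=cols_1_s, city=cols_2_s}
    }
  }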
Example 10
  public static String getUserTables(
      String username, int start, int rows, int type, String callback)
      throws SQLException, JSONException {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd");
    Map stormconf = Utils.readStormConfig();
    String connstr = (String) stormconf.get("higo.download.offline.conn");
    String uname = (String) stormconf.get("higo.download.offline.username");
    String passwd = (String) stormconf.get("higo.download.offline.passwd");
    MySqlConn m_fpsql = new MySqlConn(connstr, uname, passwd);

    Connection conn = m_fpsql.getConn();
    Statement stmt = conn.createStatement();

    String strsqlJoin =
        "select '1' as source "
            + ",tableShowName as tableShowName"
            + ",tableName as tableName"
            + ",colsShowName as colsShowName"
            + ",colsName as colsName"
            + ",colsType as colsType"
            + ",splitString as splitString"
            + ",txtStorePath as txtStorePath"
            + ",indexStorePath as indexStorePath"
            + ",'0' as extval"
            + ",'0' as isfinish"
            + ",status as status"
            + ",username as username"
            + ",createtime as createtime"
            + ",lastuptime as lastuptime"
            + ",joins as joins"
            + ",'2' as stage"
            + ",percent as percent"
            + ",resultkb as resultkb"
            + " from adhoc_joins where username='******'", "")
            + "' and status<>'DEL'  ";

    StringBuffer bufferSql = new StringBuffer();
    if (type == 0) // personal-center listing
    {
      String strsqlDownload =
          "select '2' as source "
              + ",jobname as tableShowName"
              + ",uuid as tableName"
              + ",'empty' as colsShowName"
              + ",'empty' as colsName"
              + ",'empty' as colsType"
              + ",'empty' as splitString"
              + ",storedir as txtStorePath"
              + ",'empty' as indexStorePath"
              + ",extval as extval"
              + ",isfinish as isfinish"
              + ",'INDEX' as status"
              + ",username as username"
              + ",starttime as createtime"
              + ",endtime as lastuptime"
              + ",'' as joins"
              + ",stage as stage"
              + ",percent as percent"
              + ",resultkb as resultkb"
              + " from adhoc_download where username='******'", "")
              + "' and status<>'DEL'  ";
      bufferSql.append(
          "select source,tableShowName,tableName,colsShowName"
              + ",colsName,colsType,splitString,txtStorePath,indexStorePath,extval,status,username,"
              + "createtime,lastuptime,joins,stage,percent,resultkb");
      bufferSql.append(
          " from ("
              + strsqlJoin
              + " union "
              + strsqlDownload
              + ") tmp order by tmp.createtime desc limit "
              + start
              + ","
              + rows
              + " ");
    }
    if (type == 1) // for join
    {
      bufferSql.append(
          strsqlJoin
              + " and status='INDEX' order by createtime desc limit "
              + start
              + ","
              + rows
              + " ");
    }

    String sql = bufferSql.toString();
    TableJoin.LOG.info("getUserTables:" + sql);
    ResultSet res = stmt.executeQuery(sql);
    JSONObject jsonObj = new JSONObject();
    jsonObj.put("code", "1");
    jsonObj.put("_exehql", sql);
    JSONArray jsonArray = new JSONArray();

    while (res.next()) {
      JSONObject item = new JSONObject();

      item.put("source", res.getString("source"));
      item.put("tableShowName", res.getString("tableShowName")); // 展示名称
      item.put("tableName", res.getString("tableName")); // uuid
      item.put("colsShowName", res.getString("colsShowName"));
      item.put("colsName", res.getString("colsName"));
      item.put("colsType", res.getString("colsType"));
      item.put("splitString", res.getString("splitString"));
      item.put("txtStorePath", res.getString("txtStorePath"));
      item.put("indexStorePath", res.getString("indexStorePath"));
      item.put("extval", res.getString("extval"));
      item.put("status", res.getString("status"));
      item.put("username", res.getString("username"));
      item.put("createtime", res.getString("createtime"));
      item.put("lastuptime", res.getString("lastuptime"));
      item.put("joins", res.getString("joins"));
      item.put("stage", res.getString("stage"));
      item.put("percent", res.getString("percent"));
      item.put("resultkb", res.getString("resultkb"));
      boolean issuccess =
          res.getString("status").equals("INDEX") && res.getString("extval").equals("0");
      item.put(
          "proccess", parsePercent(res.getString("stage"), res.getString("percent"), issuccess));
      item.put("isActive", String.valueOf(issuccess));
      jsonArray.put(item);
    }
    HashMap<String, String> cnt = getUserTablesCount(username, type);

    JSONObject data = new JSONObject();
    data.put("list", jsonArray);
    data.put("total", cnt.get("cnt"));
    jsonObj.put("data", data);
    jsonObj.put("total_debug", new JSONObject(cnt));
    m_fpsql.close();

    if (callback != null && callback.length() > 0) {
      return callback + "(" + jsonObj.toString() + ")";
    } else {
      return jsonObj.toString();
    }
  }
Example 11
  public static String addTxt(final String tableName, final String store, final String callback)
      throws JSONException, SQLException {
    JSONObject jsonObj = new JSONObject();
    final HashMap<String, String> tableInfo = getTableInfo(tableName);
    if (tableInfo == null || tableInfo.isEmpty()) {
      jsonObj.put("code", "0");
      jsonObj.put("message", "该表不存在");
      if (callback != null && callback.length() > 0) {
        return callback + "(" + jsonObj.toString() + ")";
      } else {
        return jsonObj.toString();
      }
    }
    if (tableInfo.get("status").equals("INDEXING")) {
      jsonObj.put("code", "0");
      jsonObj.put("message", "正在创建索引中请稍后");
      if (callback != null && callback.length() > 0) {
        return callback + "(" + jsonObj.toString() + ")";
      } else {
        return jsonObj.toString();
      }
    }

    TableJoin.updatePercent(tableName, "Stage-1 map = 0%,  reduce = 0%", "INDEXING");

    jsonObj.put("code", "1");

    EXECUTE.submit(
        new Runnable() {
          @Override
          public void run() {
            try {
              SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd");
              String day = fmt.format(new Date());
              Map stormconf = Utils.readStormConfig();
              Configuration conf = getConf(stormconf);
              FileSystem fs = FileSystem.get(conf);
              if (!fs.exists(new Path(store))) {
                fs.mkdirs(new Path(store));
              }

              Path txtpath = new Path(tableInfo.get("txtStorePath"));
              if (!fs.exists(txtpath)) {
                fs.mkdirs(txtpath);
              }

              for (FileStatus outpath : fs.listStatus(new Path(store))) {
                if (!outpath.isDir()) {
                  fs.rename(
                      outpath.getPath(),
                      new Path(
                          txtpath, outpath.getPath().getName() + "_" + System.currentTimeMillis()));
                }
              }
              fs.delete(new Path(store), true);

              Path basepath = new Path(MdrillService.getBasePath());
              FileStatus[] tablelist = fs.listStatus(basepath);
              String solrHome = tablelist[0].getPath().toString();

              for (FileStatus tbl : tablelist) {
                if (tbl.isDir() && fs.exists(new Path(tbl.getPath(), "solr"))) {
                  solrHome = tbl.getPath().toString();
                }
              }

              HashSet<String> inputs = new HashSet<String>();
              inputs.add(txtpath.getName());

              TableJoin.updateKb(tableName, HadoopUtil.duSize(fs, txtpath));

              String index =
                  (String) stormconf.get("higo.download.offline.store")
                      + "/"
                      + day
                      + "/tmp_"
                      + java.util.UUID.randomUUID().toString();
              MakeIndex.make(
                  fs,
                  solrHome,
                  conf,
                  "txt",
                  txtpath.getParent().toString(),
                  inputs,
                  "*",
                  tableInfo.get("indexStorePath"),
                  new Path(index),
                  1,
                  tableInfo.get("splitString"),
                  false,
                  tableInfo.get("colsName"),
                  new updateStatus() {

                    @Override
                    public void update(int statge, Job job) {
                      try {
                        TableJoin.LOG.info(
                            "update "
                                + tableName
                                + ",stage:"
                                + statge
                                + ",map:"
                                + job.mapProgress()
                                + ",reduce:"
                                + job.reduceProgress()
                                + ":INDEXING");
                        String percent =
                            "Stage-"
                                + statge
                                + " map = "
                                + (job.mapProgress() * 100)
                                + "%,  reduce = "
                                + (job.reduceProgress() * 100)
                                + "%";
                        TableJoin.updatePercent(tableName, percent, "INDEXING");
                      } catch (Exception e) {
                        TableJoin.LOG.error("updatePercent", e);
                      }
                    }

                    @Override
                    public void finish() {
                      try {
                        TableJoin.LOG.info("update " + tableName + ",INDEX");
                        TableJoin.updatePercent(
                            tableName, "Stage-2 map = 100%,  reduce = 100%", "INDEX");
                      } catch (Exception e) {
                        TableJoin.LOG.error("updatePercent", e);
                      }
                    }
                  });
            } catch (Exception e) {
              TableJoin.LOG.error("make index", e);
              try {
                TableJoin.updatePercent(tableName, "Stage-2 map = 0%,  reduce = 0%", "FAIL");
              } catch (Exception e2) {
                TableJoin.LOG.error("updatePercent", e2);
              }
            }
          }
        });

    if (callback != null && callback.length() > 0) {
      return callback + "(" + jsonObj.toString() + ")";
    } else {
      return jsonObj.toString();
    }
  }