Example #1
0
  public void insertRecord(String tableName, String record) {

    Iterator<Table> it = Tables.iterator();
    TableList TL = null;
    Pages P = null;
    Table T = null;

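    // Find the target table and its last page entry.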
    while (it.hasNext()) {
      T = it.next();
      if (T.getTableName().equals(tableName)) {
        TL = T.getLastPage();
        break;
      }
    }

    if (TL == null) {
      System.out.println("Invalid Table Name");
      return;
    }

    if (record.length() < TL.getAvailableSpace()) {
      // The record fits on the last page: load it, append the record, and refresh the metadata.
      P = MM.getPage(TL.getPageId(), tableName, 1);
      P.insertRec(record);
      T.updatePage(P);
      TL.setEndId(P.getEndId());
      TL.setAvailableSpace(P.getAvailableSpace());
    } else {
      // The last page is full: create a new page, insert the record there,
      // and register the page and its TableList entry with the table.
      P = new Pages(TL.getEndId() + 1, pageSize, TL.getPageId() + 1);
      P.insertRec(record);
      TL = new TableList(P.getStartId(), P.getEndId(), P.getPageId(), P.getAvailableSpace());
      T.insertPage(P);
      T.insertTableList(TL);
    }
  }
Example #2
0
  public String getRecord(String tableName, int recordId) {

    Iterator<Table> it = Tables.iterator();
    int pageID = -2; // stays -2 if the table is not found; -1 signals an invalid record id
    Pages P = null;

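    // Find the table and resolve which page holds the requested record.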
    while (it.hasNext()) {
      Table T = it.next();
      if (T.getTableName().equals(tableName)) {
        pageID = T.getPageId(recordId);
        break;
      }
    }

    if (pageID == -1) {
      System.out.println("Invalid Record Request");
      return null;
    } else if (pageID == -2) {
      System.out.println("Invalid Table Name");
      return null;
    } else {
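      // Load the page that contains the record and look it up by id.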
      P = MM.getPage(pageID, tableName, 0);
      return P.getRecFromId(recordId);
    }
  }
Example #3
0
 public static Properties getHiveSchema(Partition partition, Table table) {
   // Mimics function in Hive: MetaStoreUtils.getSchema(Partition, Table)
   return getHiveSchema(
       partition.getStorage(),
       partition.getColumns(),
       table.getDataColumns(),
       table.getParameters(),
       table.getDatabaseName(),
       table.getTableName(),
       table.getPartitionColumns());
 }
Example #4
0
 public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(
     Table table, PrincipalPrivilegeSet privileges) {
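   // Copy each piece of table metadata into the Thrift metastore representation.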
   org.apache.hadoop.hive.metastore.api.Table result =
       new org.apache.hadoop.hive.metastore.api.Table();
   result.setDbName(table.getDatabaseName());
   result.setTableName(table.getTableName());
   result.setOwner(table.getOwner());
   result.setTableType(table.getTableType());
   result.setParameters(table.getParameters());
   result.setPartitionKeys(
       table
           .getPartitionColumns()
           .stream()
           .map(MetastoreUtil::toMetastoreApiFieldSchema)
           .collect(toList()));
   result.setSd(
       makeStorageDescriptor(table.getTableName(), table.getDataColumns(), table.getStorage()));
   result.setPrivileges(privileges);
   result.setViewOriginalText(table.getViewOriginalText().orElse(null));
   result.setViewExpandedText(table.getViewExpandedText().orElse(null));
   return result;
 }
Example #5
0
  public void populateDBInfo() {

    Iterator<Table> it = Tables.iterator();
    String fileName, line;
    Pages P;
    TableList TL = null;
    Table T;
    int recCount, pageCount;

    while (it.hasNext()) {
      // for all the tables
      T = it.next();

      P = null;
      TL = null;
      fileName = pathForData + T.getTableName() + ".csv";
      recCount = 0;
      pageCount = 0;

      try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
        // Create an empty page for this table's records.
        P = new Pages(recCount, pageSize, pageCount++);

        while ((line = br.readLine()) != null) {
          // process the line.
          if (P.checkSpace(line.length())) {
            P.insertRec(line);
            recCount += 1;
          } else {
            // save the current page
            TL = new TableList(P.getStartId(), P.getEndId(), P.getPageId(), P.getAvailableSpace());
            T.insertPage(P);
            T.insertTableList(TL);
            T.noOfPages += 1;

            // start a new page to store records.
            P = new Pages(recCount, pageSize, pageCount++);
            if (P.checkSpace(line.length())) {
              P.insertRec(line);
              recCount += 1;
            }
          }
        }
      } catch (FileNotFoundException e) {
        // No data file exists for this table; it is left without pages.
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      }

      // Save the last, partially filled page (if any records were read).
      if (P != null && P.getAvailableSpace() < pageSize) {
        TL = new TableList(P.getStartId(), P.getEndId(), P.getPageId(), P.getAvailableSpace());
        T.insertPage(P);
        T.insertTableList(TL);
        T.noOfPages += 1;
      }
    }
  }