public void open() throws DbException, TransactionAbortedException {
  // Reuse the saved iterator i_pos if one exists; otherwise fetch the table's
  // HeapFile from the catalog and create a new iterator for this transaction.
  if (i_pos != null) {
    i = i_pos;
  } else {
    Catalog gc = Database.getCatalog();
    HeapFile file = (HeapFile) gc.getDbFile(tableid);
    i = file.iterator(tid);
  }
  i.open();
}
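A minimal sketch of how this open() is typically driven by a caller. The scan variable and the SeqScan-style interface (hasNext/next/close) are assumptions for illustration, not part of the example above.

// Illustrative driver loop over a SeqScan-style operator named "scan".
scan.open();                 // builds the HeapFile iterator through the catalog, as above
while (scan.hasNext()) {
  Tuple t = scan.next();
  System.out.println(t);     // e.g. print every tuple in the scanned table
}
scan.close();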
Example #2
 /**
  * Retrieve the specified page with the associated permissions. Will acquire a lock and may block
  * if that lock is held by another transaction.
  *
  * <p>The retrieved page should be looked up in the buffer pool. If it is present, it should be
  * returned. If it is not present, it should be added to the buffer pool and returned. If there is
  * insufficient space in the buffer pool, a page should be evicted and the new page should be
  * added in its place.
  *
  * @param tid the ID of the transaction requesting the page
  * @param pid the ID of the requested page
  * @param perm the requested permissions on the page
  * @throws DbException
  * @throws TransactionAbortedException
  */
 public Page getPage(TransactionId tid, PageId pid, Permissions perm)
      throws DbException, TransactionAbortedException {
    // Block until the lock implied by perm is granted to this transaction.
    lockManager.acquireLock(tid, pid, perm);
    // Fast path: the page is already cached in the buffer pool.
    if (pageIdToPages.containsKey(pid)) {
      return pageIdToPages.get(pid);
    }
    // Pool is full: evict a page to make room before reading from disk.
    if (currentPages.get() == maxPages) {
      evictPage();
    }
    // Cache miss: read the page from its table's DbFile and cache it.
    int tableId = pid.getTableId();
    Catalog catalog = Database.getCatalog();
    DbFile dbFile = catalog.getDatabaseFile(tableId);
    Page page = dbFile.readPage(pid);
    pageIdToPages.put(pid, page);
    currentPages.incrementAndGet();
    return page;
  }
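For context, callers such as a heap-file iterator would normally fetch pages through this method rather than reading from disk directly. A minimal usage sketch, assuming the usual SimpleDB-style entry points (Database.getBufferPool(), HeapPageId, HeapPage, Permissions.READ_ONLY) are available; none of these names appear in the snippet above.

  // Illustrative only: route a page read through the buffer pool so that
  // locking, caching, and eviction all apply.
  public static Iterator<Tuple> readPageTuples(TransactionId tid, int tableId, int pageNo)
      throws DbException, TransactionAbortedException {
    PageId pid = new HeapPageId(tableId, pageNo);
    HeapPage page = (HeapPage) Database.getBufferPool().getPage(tid, pid, Permissions.READ_ONLY);
    return page.iterator();
  }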
  /** Builds the aliased TupleDesc: every field name is prefixed with the table alias. */
  private void createAliasedTd() {
    Catalog gc = Database.getCatalog();
    TupleDesc old_td = gc.getTupleDesc(tableid);
    String[] newFieldAr = new String[old_td.numFields()];
    Type[] typeAr = new Type[old_td.numFields()];
    String field = null;
    for (int i = 0; i < newFieldAr.length; i++) {
      field = old_td.getFieldName(i);
      // A missing alias or field name is rendered as the literal string "null".
      if (alias == null) {
        alias = "null";
      } else if (field == null) {
        field = "null";
      }
      newFieldAr[i] = alias + "." + field;
      typeAr[i] = old_td.getFieldType(i);
    }
    td = new TupleDesc(typeAr, newFieldAr);
  }
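To illustrate the result, assume a table with columns id and name registered in the catalog and a scan built with alias "t" (both the schema and the alias are assumptions). The aliased descriptor then exposes prefixed field names:

// Hypothetical example of the aliased names produced above.
TupleDesc td = new SeqScan(tid, tableId, "t").getTupleDesc();
System.out.println(td.getFieldName(0));  // "t.id"
System.out.println(td.getFieldName(1));  // "t.name"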
  public static void main(String[] args) throws Exception {
    if (args.length < 1 || args.length > 5) {
      System.out.println("Invalid number of arguments.\n" + usage);
      return;
    }

     // Optional flags after the schema file: --conf <confDir> and --output <outputDir>.
     String confDir = Server.DEFAULT_CONF_DIR;
     String outputDir = DEFAULT_OUTPUT_DIR;
    if (args.length >= 3 && args[1].equals("--conf")) {
      confDir = args[2];
      args = ParallelUtility.removeArg(args, 1);
      args = ParallelUtility.removeArg(args, 1);
    }

    if (args.length >= 3 && args[1].equals("--output")) {
      outputDir = args[2];
      args = ParallelUtility.removeArg(args, 1);
      args = ParallelUtility.removeArg(args, 1);
    }

    Catalog c = Database.getCatalog();

     // Load the worker list from the conf directory, then the catalog schema and table stats.
     SocketInfo[] workers = ParallelUtility.loadWorkers(confDir);
     c.loadSchema(args[0]);
     TableStats.computeStatistics();

    File catalogFile = new File(args[0]);

     // Create one output folder per worker and copy the catalog schema into it.
     for (SocketInfo worker : workers) {
       File folder = new File(outputDir + "/" + worker.getHost() + "_" + worker.getPort());
       folder.mkdirs();
      ParallelUtility.copyFileFolder(
          catalogFile, new File(folder.getAbsolutePath() + "/catalog.schema"), true);
    }

     // Transaction id used only for reading the source tables while splitting them.
     TransactionId fateTid = new TransactionId();

    Iterator<Integer> tableIds = c.tableIdIterator();
    while (tableIds.hasNext()) {
      int tableid = tableIds.next();
      int numTuples = getTotalTuples(tableid);
      HeapFile h = (HeapFile) c.getDatabaseFile(tableid);

       // Split the table's tuples as evenly as possible: each worker gets
       // numTuples / workers.length tuples, and the first numTuples % workers.length
       // workers receive one extra tuple.
       int eachSplitSize = numTuples / workers.length;
       int[] splitSizes = new int[workers.length];
       Arrays.fill(splitSizes, eachSplitSize);
       for (int i = 0; i < numTuples % workers.length; i++) {
         splitSizes[i] += 1;
       }

      DbFileIterator dfi = h.iterator(fateTid);
      dfi.open();

       for (int i = 0; i < workers.length; i++) {
         // Collect this worker's share of tuples from the table's iterator.
         ArrayList<Tuple> buffer = new ArrayList<Tuple>();
         for (int j = 0; j < splitSizes[i]; j++) {
           if (dfi.hasNext()) {
             buffer.add(dfi.next());
           }
         }
         // Gather the column types that are passed to writeHeapFile alongside the tuples.
         Iterator<TDItem> items = h.getTupleDesc().iterator();
         ArrayList<Type> types = new ArrayList<Type>();
         while (items.hasNext()) {
           types.add(items.next().fieldType);
         }

        writeHeapFile(
            buffer,
            new File(
                outputDir
                    + "/"
                    + workers[i].getHost()
                    + "_"
                    + workers[i].getPort()
                    + "/"
                    + c.getTableName(tableid)
                    + ".dat"),
            BufferPool.getPageSize(),
            types.toArray(new Type[] {}));
      }
    }
  }
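One possible invocation of this entry point, with the enclosing class name (SplitCatalog) and the directory layout chosen only for illustration; the argument order follows the parsing shown above.

// Hypothetical invocation: split the tables described by catalog.schema across the
// workers listed under conf/, writing each worker's .dat files under output/.
String[] exampleArgs = {"catalog.schema", "--conf", "conf", "--output", "output"};
SplitCatalog.main(exampleArgs);  // class name is an assumption for illustration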