// see DbFile.java for javadocs
public ArrayList<Page> insertTuple(TransactionId tid, Tuple t)
        throws DbException, IOException, TransactionAbortedException {
    BufferPool bp = Database.getBufferPool();
    int id = getId(), i, slots;
    ArrayList<Page> retlist = new ArrayList<Page>();
    PageId pid = null;
    HeapPage p = null;
    // Scan the existing pages for one with a free slot.
    for (i = 0; i < numPages(); i++) {
        pid = new HeapPageId(id, i);
        p = (HeapPage) bp.getPage(tid, pid, Permissions.READ_WRITE);
        slots = p.getNumEmptySlots();
        if (slots > 0) {
            p.insertTuple(t);
            retlist.add(p);
            return retlist;
        }
    }
    // No page had room: append a blank page to the backing file and insert the
    // tuple into it. After the loop, i == numPages(), so this is the new page.
    pid = new HeapPageId(id, i);
    raf.setLength(raf.length() + BufferPool.getPageSize());
    p = (HeapPage) bp.getPage(tid, pid, Permissions.READ_WRITE);
    p.insertTuple(t);
    retlist.add(p);
    return retlist;
}
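// A minimal usage sketch (illustrative, not part of the original file): insert
// one tuple through the method above. It assumes a catalog table named "test"
// with a single INT column; the table name and the value 42 are hypothetical,
// and IntField.createIntField is the factory used elsewhere in this codebase.
private static void insertTupleExample() throws Exception {
    TransactionId tid = new TransactionId();
    Catalog catalog = Database.getCatalog();
    HeapFile table = (HeapFile) catalog.getDatabaseFile(catalog.getTableId("test"));
    Tuple t = new Tuple(table.getTupleDesc());
    t.setField(0, IntField.createIntField(42));
    ArrayList<Page> dirtied = table.insertTuple(tid, t);
    for (Page page : dirtied) {
        page.markDirty(true, tid); // the BufferPool normally marks dirtied pages
    }
}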
// see DbFile.java for javadocs
public Page deleteTuple(TransactionId tid, Tuple t)
        throws DbException, TransactionAbortedException {
    BufferPool bp = Database.getBufferPool();
    RecordId rid = t.getRecordId();
    // A tuple with no RecordId, or one stored in a different table, is not a
    // member of this file.
    if (rid == null || rid.getPageId().getTableId() != getId()) {
        throw new DbException("Tuple is not a member of this file");
    }
    HeapPage p = (HeapPage) bp.getPage(tid, rid.getPageId(), Permissions.READ_WRITE);
    p.deleteTuple(t);
    return p;
}
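// A minimal usage sketch (illustrative): delete the first tuple of a file by
// scanning it with the file's iterator. Error handling is omitted for brevity.
private static void deleteFirstTupleExample(HeapFile table) throws Exception {
    TransactionId tid = new TransactionId();
    DbFileIterator it = table.iterator(tid);
    it.open();
    if (it.hasNext()) {
        Tuple victim = it.next();        // the tuple carries its own RecordId
        Page dirtied = table.deleteTuple(tid, victim);
        dirtied.markDirty(true, tid);    // the BufferPool normally does this
    }
    it.close();
}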
/**
 * Given a matrix of tuples from SystemTestUtil.createRandomHeapFile, create an
 * identical HeapFile table.
 *
 * @param tuples tuples to create a HeapFile from
 * @param columns number of columns; every row must satisfy
 *     "columns == tuples.get(i).size()"
 * @param colPrefix string to prefix to the column names (by default the columns
 *     are named after their column number)
 * @return a new HeapFile containing the specified tuples
 * @throws IOException if a temporary file can't be created to hand to HeapFile
 *     to open and read its data
 */
public static HeapFile createDuplicateHeapFile(
        ArrayList<ArrayList<Integer>> tuples, int columns, String colPrefix)
        throws IOException {
    File temp = File.createTempFile("table", ".dat");
    temp.deleteOnExit();
    // Encode the tuple matrix into heap-page format, then open it as a HeapFile.
    HeapFileEncoder.convert(tuples, temp, BufferPool.getPageSize(), columns);
    return Utility.openHeapFile(columns, colPrefix, temp);
}
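// A minimal usage sketch (illustrative): build a random two-column table and an
// identical copy of it. The createRandomHeapFile(columns, rows,
// columnSpecification, tuples) signature and the "c" column prefix are
// assumptions based on how the system tests use these helpers.
private static void duplicateExample() throws Exception {
    ArrayList<ArrayList<Integer>> tuples = new ArrayList<ArrayList<Integer>>();
    HeapFile original = SystemTestUtil.createRandomHeapFile(2, 1000, null, tuples);
    HeapFile copy = createDuplicateHeapFile(tuples, 2, "c");
    // Both files now hold the same tuples, backed by different temp files.
}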
public Tuple getNext() throws NoSuchElementException, TransactionAbortedException {
    try {
        // Read one fixed-width tuple: one 4-byte int per field, in field order.
        Tuple tuple = new Tuple(td);
        for (int i = 0; i < td.numFields(); i++) {
            IntField intf = IntField.createIntField(in.readInt());
            tuple.setField(i, intf);
        }
        return tuple;
    } catch (EOFException eof) {
        // A clean EOF means the stream is exhausted, not that something failed.
        throw new NoSuchElementException(eof.getMessage());
    } catch (Exception e) {
        // Any other failure aborts the transaction and tears down the connection.
        e.printStackTrace();
        BufferPool.Instance().abortTransaction(tid);
        closeConnection();
        throw new TransactionAbortedException(e);
    }
}
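// A minimal consumer sketch (illustrative): getNext() signals end-of-stream via
// NoSuchElementException, so callers drain it with a try/catch loop. The
// TupleSource interface below is hypothetical, standing in for whatever class
// hosts the getNext() above.
interface TupleSource {
    Tuple getNext() throws NoSuchElementException, TransactionAbortedException;
}

static void drain(TupleSource reader) throws TransactionAbortedException {
    while (true) {
        try {
            Tuple t = reader.getNext();
            System.out.println(t); // hypothetical consumer of each tuple
        } catch (NoSuchElementException end) {
            break; // clean end of stream
        }
    }
}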
public static void main(String[] args) throws Exception {
    if (args.length < 1 || args.length > 5) {
        System.out.println("Invalid number of arguments.\n" + usage);
        return;
    }
    String confDir = Server.DEFAULT_CONF_DIR;
    String outputDir = DEFAULT_OUTPUT_DIR;
    // Optional flags: --conf <dir> and --output <dir>, each consumed along with
    // its value so args[0] remains the schema file.
    if (args.length >= 3 && args[1].equals("--conf")) {
        confDir = args[2];
        args = ParallelUtility.removeArg(args, 1);
        args = ParallelUtility.removeArg(args, 1);
    }
    if (args.length >= 3 && args[1].equals("--output")) {
        outputDir = args[2];
        args = ParallelUtility.removeArg(args, 1);
        args = ParallelUtility.removeArg(args, 1);
    }

    Catalog c = Database.getCatalog();
    SocketInfo[] workers = ParallelUtility.loadWorkers(confDir);
    c.loadSchema(args[0]);
    TableStats.computeStatistics();

    // Ship a copy of the catalog schema to every worker's output folder.
    File catalogFile = new File(args[0]);
    for (SocketInfo worker : workers) {
        File folder = new File(outputDir + "/" + worker.getHost() + "_" + worker.getPort());
        folder.mkdirs();
        ParallelUtility.copyFileFolder(
                catalogFile, new File(folder.getAbsolutePath() + "/catalog.schema"), true);
    }

    TransactionId fateTid = new TransactionId();
    Iterator<Integer> tableIds = c.tableIdIterator();
    while (tableIds.hasNext()) {
        int tableid = tableIds.next();
        int numTuples = getTotalTuples(tableid);
        HeapFile h = (HeapFile) c.getDatabaseFile(tableid);

        // Split the table as evenly as possible: every worker gets
        // numTuples / workers.length tuples, and the first
        // numTuples % workers.length workers each get one extra.
        int eachSplitSize = numTuples / workers.length;
        int[] splitSizes = new int[workers.length];
        Arrays.fill(splitSizes, eachSplitSize);
        for (int i = 0; i < numTuples % workers.length; i++) {
            splitSizes[i] += 1;
        }

        // The field types are identical for every split, so compute them once.
        Iterator<TDItem> items = h.getTupleDesc().iterator();
        ArrayList<Type> types = new ArrayList<Type>();
        while (items.hasNext()) {
            types.add(items.next().fieldType);
        }

        DbFileIterator dfi = h.iterator(fateTid);
        dfi.open();
        for (int i = 0; i < workers.length; i++) {
            ArrayList<Tuple> buffer = new ArrayList<Tuple>();
            for (int j = 0; j < splitSizes[i]; j++) {
                if (!dfi.hasNext()) {
                    break; // should not happen: the split sizes sum to numTuples
                }
                buffer.add(dfi.next());
            }
            writeHeapFile(
                    buffer,
                    new File(outputDir + "/" + workers[i].getHost() + "_" + workers[i].getPort()
                            + "/" + c.getTableName(tableid) + ".dat"),
                    BufferPool.getPageSize(),
                    types.toArray(new Type[] {}));
        }
        dfi.close();
    }
}
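// A worked example of the split arithmetic above (illustrative, self-contained):
// 10 tuples over 3 workers gives a base of 3 each, and the remainder of 1 goes
// to the first worker, yielding splits of {4, 3, 3}.
public class SplitSizeDemo {
    public static void main(String[] args) {
        int numTuples = 10;
        int numWorkers = 3;
        int[] splitSizes = new int[numWorkers];
        java.util.Arrays.fill(splitSizes, numTuples / numWorkers); // {3, 3, 3}
        for (int i = 0; i < numTuples % numWorkers; i++) {
            splitSizes[i] += 1; // distribute the remainder -> {4, 3, 3}
        }
        System.out.println(java.util.Arrays.toString(splitSizes)); // [4, 3, 3]
    }
}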