@Test public void new_getFieldTypeTest() { int length = 100; String name = "td"; TupleDesc td = Utility.getTupleDesc(length, name); // Index above the valid range. try { td.getFieldType(length + 1); fail("expected exception"); } catch (NoSuchElementException e) { } // Index below the valid range. try { td.getFieldType(-1); fail("expected exception"); } catch (NoSuchElementException e) { } // Check each field type. for (int i = 0; i < length; i++) { assertEquals(Type.INT_TYPE, td.getFieldType(i)); } }
/** Suck up tuples from the source file. */ private Tuple readNextTuple(DataInputStream dis, int slotId) throws NoSuchElementException { // if associated bit is not set, read forward to the next tuple, and // return null. if (!getSlot(slotId)) { for (int i = 0; i < td.getSize(); i++) { try { dis.readByte(); } catch (IOException e) { throw new NoSuchElementException("error reading empty tuple"); } } return null; } // read fields in the tuple Tuple t = new Tuple(td); RecordID rid = new RecordID(pid, slotId); t.setRecordID(rid); try { for (int j = 0; j < td.numFields(); j++) { Field f = td.getType(j).parse(dis); t.setField(j, f); } } catch (java.text.ParseException e) { // e.printStackTrace(); throw new NoSuchElementException("parsing error!"); } return t; }
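// A minimal sketch of the slot check that readNextTuple relies on, assuming
// header.getHeader() exposes the raw bitmap bytes (as used in getPageData below)
// and that slot i maps to bit (i % 8) of byte (i / 8), low-order bit first;
// the actual Header class in this codebase may implement this differently.
private boolean getSlot(int slotId) {
  byte[] bitmap = header.getHeader();
  return ((bitmap[slotId / 8] >> (slotId % 8)) & 1) == 1;
}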
/** Unit test for TupleDesc.getSize() */ @Test public void getSize() { int[] lengths = new int[] {1, 2, 1000}; for (int len : lengths) { TupleDesc td = Utility.getTupleDesc(len); assertEquals(len * Type.INT_TYPE.getLen(), td.getSize()); } }
/** Unit test for TupleDesc.numFields() */ @Test public void numFields() { int[] lengths = new int[] {1, 2, 1000}; for (int len : lengths) { TupleDesc td = Utility.getTupleDesc(len); assertEquals(len, td.numFields()); } }
// Test mix of INT_TYPE and STRING_TYPE. @Test public void new_getSizeTest() { Type[] typeAr = new Type[] {Type.INT_TYPE, Type.INT_TYPE, Type.STRING_TYPE, Type.STRING_TYPE}; String[] fieldAr = new String[] {"number0", "number1", "string0", "string1"}; TupleDesc td = new TupleDesc(typeAr, fieldAr); int length = 2 * Type.INT_TYPE.getLen() + 2 * Type.STRING_TYPE.getLen(); assertEquals(length, td.getSize()); }
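// Worked example for the mixed-type size above, assuming the common SimpleDB
// defaults of Type.INT_TYPE.getLen() == 4 and Type.STRING_TYPE.getLen() == 132
// (128 string bytes plus a 4-byte length): getSize() would be 2*4 + 2*132 = 272
// bytes. Computing "length" from getLen() keeps the test independent of those defaults.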
/** Unit test for TupleDesc.getFieldType() */ @Test public void getType() { int[] lengths = new int[] {1, 2, 1000}; for (int len : lengths) { TupleDesc td = Utility.getTupleDesc(len); for (int i = 0; i < len; ++i) assertEquals(Type.INT_TYPE, td.getFieldType(i)); } }
/** * Generates a byte array representing the contents of this page. Used to serialize this page to * disk. * * <p>The invariant here is that it should be possible to pass the byte array generated by * getPageData to the HeapPage constructor and have it produce an identical HeapPage object. * * @see #HeapPage * @return A byte array corresponding to the bytes of this page. */ public byte[] getPageData() { // int len = header.length*4 + BufferPool.PAGE_SIZE; int len = BufferPool.PAGE_SIZE; ByteArrayOutputStream baos = new ByteArrayOutputStream(len); DataOutputStream dos = new DataOutputStream(baos); // create the header of the page try { dos.write(header.getHeader()); } catch (IOException e) { // this really shouldn't happen e.printStackTrace(); } // create the tuples for (int i = 0; i < numSlots; i++) { // empty slot if (!getSlot(i)) { for (int j = 0; j < td.getSize(); j++) { try { dos.writeByte(0); } catch (IOException e) { e.printStackTrace(); } } continue; } // non-empty slot for (int j = 0; j < td.numFields(); j++) { Field f = tuples[i].getField(j); try { f.serialize(dos); } catch (IOException e) { e.printStackTrace(); } } } // padding int zerolen = BufferPool.PAGE_SIZE - numSlots * td.getSize() - header.length(); byte[] zeroes = new byte[zerolen]; try { dos.write(zeroes, 0, zerolen); } catch (IOException e) { e.printStackTrace(); } try { dos.flush(); } catch (IOException e) { e.printStackTrace(); } return baos.toByteArray(); }
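// A hedged sketch of the round-trip invariant described above: serializing a page
// and handing the bytes back to the HeapPage constructor should yield a page with
// identical on-disk contents. Assumes java.util.Arrays is imported and that "pid"
// and "data" hold a valid HeapPageId and page image.
HeapPage page = new HeapPage(pid, data);
byte[] serialized = page.getPageData();
HeapPage copy = new HeapPage(pid, serialized);
assert Arrays.equals(serialized, copy.getPageData()); // round-trip preserves the bytes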
/** Test the one-parameter constructor; it should leave every field name empty. */ @Test public void new_oneParamsConTest() { int[] lengths = new int[] {1, 2, 1000}; for (int len : lengths) { TupleDesc td = Utility.getTupleDesc(len); for (int i = 0; i < len; i++) { assertEquals("", td.getFieldName(i)); } } }
/** * Returns the TupleDesc with field names from the underlying HeapFile, prefixed with the * tableAlias string from the constructor. This prefix becomes useful when joining tables * containing a field(s) with the same name. * * @return the TupleDesc with field names from the underlying HeapFile, prefixed with the * tableAlias string from the constructor. */ public TupleDesc getTupleDesc() { TupleDesc tup = Database.getCatalog().getTupleDesc(id); int length = tup.numFields(); String[] field = new String[length]; Type[] types = new Type[length]; for (int i = 0; i < length; i++) { types[i] = tup.getFieldType(i); field[i] = alias + "." + tup.getFieldName(i); } return new TupleDesc(types, field); }
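// Illustrative usage of the aliased TupleDesc, assuming a TransactionId "tid" and a
// table id "tableId" set up elsewhere; the alias "t1" and field "id" are hypothetical.
// The prefixed names let predicates and joins disambiguate same-named columns.
SeqScan scan = new SeqScan(tid, tableId, "t1");
TupleDesc aliased = scan.getTupleDesc();
int col = aliased.fieldNameToIndex("t1.id"); // prefixed name resolves to the column index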
/** * Create a new TableStats object that keeps track of statistics on each column of a table * * @param tableid The table over which to compute statistics * @param ioCostPerPage The cost per page of IO. This doesn't differentiate between * sequential-scan IO and disk seeks. */ public TableStats(int tableid, int ioCostPerPage) { // For this function, we use the DbFile for the table in question, // then scan through its tuples and calculate the values needed // to build the histograms. // TODO: Fill out the rest of the constructor. // Feel free to change anything already written, it's only a guideline this.ioCostPerPage = ioCostPerPage; DbFile file = Database.getCatalog().getDbFile(tableid); tupleDesc = file.getTupleDesc(); numPages = ((HeapFile) file).numPages(); numTuples = 0; int numFields = tupleDesc.numFields(); // TODO: what goes here? statistics = new ArrayList<Object>(); for (int i = 0; i < numFields; i++) { if (Type.INT_TYPE.equals(tupleDesc.getFieldType(i))) { statistics.add(new IntStatistics(NUM_HIST_BINS)); } else { statistics.add(new StringHistogram(NUM_HIST_BINS)); } } final DbFileIterator iter = file.iterator(null); try { iter.open(); while (iter.hasNext()) { Tuple t = iter.next(); numTuples++; // TODO: and here? for (int i = 0; i < numFields; i++) { if (Type.INT_TYPE.equals(tupleDesc.getFieldType(i))) { ((IntStatistics) statistics.get(i)).addValue(((IntField) t.getField(i)).getValue()); } else { ((StringHistogram) statistics.get(i)) .addValue(((StringField) t.getField(i)).getValue()); } } } iter.close(); } catch (DbException e) { e.printStackTrace(); } catch (TransactionAbortedException e) { e.printStackTrace(); } }
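// Hedged usage sketch: once the per-column statistics are built, the optimizer asks
// TableStats for cost and cardinality estimates. The method names estimateScanCost()
// and estimateTableCardinality(double) follow the usual SimpleDB TableStats API and
// are an assumption here.
TableStats stats = new TableStats(tableid, ioCostPerPage);
double scanCost = stats.estimateScanCost();        // roughly numPages * ioCostPerPage
int card = stats.estimateTableCardinality(0.5);    // expected tuple count at 50% selectivity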
@Test public void new_iteratorTest() { int[] lengths = new int[] {1, 2, 1000}; for (int len : lengths) { TupleDesc td = Utility.getTupleDesc(len); Iterator<TupleDesc.TDItem> i = td.iterator(); while (i.hasNext()) { TupleDesc.TDItem item = i.next(); assertEquals(Type.INT_TYPE, item.fieldType); assertEquals("", item.fieldName); } } }
private void createAliasedTd() { Catalog gc = Database.getCatalog(); TupleDesc old_td = gc.getTupleDesc(tableid); String[] newFieldAr = new String[old_td.numFields()]; Type[] typeAr = new Type[old_td.numFields()]; String field = null; for (int i = 0; i < newFieldAr.length; i++) { field = old_td.getFieldName(i); // Substitute "null" independently for a missing alias or a missing field name. if (alias == null) { alias = "null"; } if (field == null) { field = "null"; } newFieldAr[i] = alias + "." + field; typeAr[i] = old_td.getFieldType(i); } td = new TupleDesc(typeAr, newFieldAr); }
/** * Estimate the selectivity of predicate <tt>field op constant</tt> on the table. * * @param field The field over which the predicate ranges * @param op The logical operation in the predicate * @param constant The value against which the field is compared * @return The estimated selectivity (fraction of tuples that satisfy) the predicate */ public double estimateSelectivity(int field, Predicate.Op op, Field constant) { // TODO: some code goes here if (Type.INT_TYPE.equals(tupleDesc.getFieldType(field))) { return ((IntStatistics) statistics.get(field)) .estimateSelectivity(op, ((IntField) constant).getValue()); } else { return ((StringHistogram) statistics.get(field)) .estimateSelectivity(op, ((StringField) constant).getValue()); } }
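// Illustrative call, assuming column 0 of this table is an INT_TYPE field; the
// constant 42 is arbitrary. The result is the estimated fraction of tuples whose
// field 0 is greater than 42.
double sel = stats.estimateSelectivity(0, Predicate.Op.GREATER_THAN, new IntField(42));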
/** Unit test for TupleDesc.fieldNameToIndex() */ @Test public void nameToId() { int[] lengths = new int[] {1, 2, 1000}; String prefix = "test"; for (int len : lengths) { // Make sure you retrieve well-named fields TupleDesc td = Utility.getTupleDesc(len, prefix); for (int i = 0; i < len; ++i) { assertEquals(i, td.fieldNameToIndex(prefix + i)); } // Make sure you throw an exception for non-existent fields try { td.fieldNameToIndex("foo"); Assert.fail("foo is not a valid field name"); } catch (NoSuchElementException e) { // expected to get here } // Make sure you throw an exception for null searches try { td.fieldNameToIndex(null); Assert.fail("null is not a valid field name"); } catch (NoSuchElementException e) { // expected to get here } // Make sure you throw an exception when all field names are null td = Utility.getTupleDesc(len); try { td.fieldNameToIndex(prefix); Assert.fail("no fields are named, so you can't find it"); } catch (NoSuchElementException e) { // expected to get here } } }
@Test public void new_fieldNameToIndexTest() { Type[] typeAr = new Type[] {Type.INT_TYPE, Type.INT_TYPE, Type.INT_TYPE}; String[] fieldAr = new String[] {"number0", "number1", "number0"}; TupleDesc td = new TupleDesc(typeAr, fieldAr); // Throw an exception if the name does not exist. try { td.fieldNameToIndex("a_not_existing_name"); fail("expected exception"); } catch (NoSuchElementException e) { } assertEquals(1, td.fieldNameToIndex("number1")); // Return the first index if duplicate names exist. assertEquals(0, td.fieldNameToIndex("number0")); String[] nullFieldAr = new String[] {null, null, null}; TupleDesc nullTD = new TupleDesc(typeAr, nullFieldAr); // assertEquals(0, nullTD.fieldNameToIndex(null)); // Field names can be set to null manually. }
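// A minimal sketch of the lookup behavior the two tests above exercise, not
// necessarily the actual implementation: a linear scan over the field names returns
// the first match and throws NoSuchElementException for null or unknown names, which
// also covers descriptors whose field names are all null.
public int fieldNameToIndex(String name) throws NoSuchElementException {
  for (int i = 0; i < numFields(); i++) {
    String fieldName = getFieldName(i);
    if (fieldName != null && fieldName.equals(name)) {
      return i; // first occurrence wins for duplicated names
    }
  }
  throw new NoSuchElementException("no field named " + name);
}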
/** * Returns the TupleDesc with field names from the underlying HeapFile, prefixed with the * tableAlias string from the constructor. This prefix becomes useful when joining tables * containing a field(s) with the same name. * * @return the TupleDesc with field names from the underlying HeapFile, prefixed with the * tableAlias string from the constructor. */ public TupleDesc getTupleDesc() { // some code goes here TupleDesc td = Database.getCatalog().getTupleDesc(tableid); Iterator<TDItem> tdIter = td.iterator(); int size = td.numFields(); Type[] typeAr = new Type[size]; String[] fieldAr = new String[size]; String aliasString = this.tableAlias; TDItem item; Type fieldType; String fieldName; int count = 0; if (aliasString == null) { aliasString = "null"; } // for (int i = 0; i < size; i++){ // item = tdIter.next(); // fieldType = item.fieldType; // fieldName = item.fieldName; while (tdIter.hasNext()) { item = tdIter.next(); fieldType = item.fieldType; fieldName = item.fieldName; if (fieldName == null) { fieldName = "null"; } typeAr[count] = fieldType; fieldAr[count] = aliasString + "." + fieldName; // "null.null case may occur" count++; } return new TupleDesc(typeAr, fieldAr); }
/** * Adds the specified tuple to the page; the tuple should be updated to reflect that it is now * stored on this page. * * @throws DbException if the page is full (no empty slots) or the tuple descriptor does not match. * @param t The tuple to add. */ public void insertTuple(Tuple t) throws DbException { // some code goes here // not necessary for lab1 if (getNumEmptySlots() == 0 || !td.equals(t.getTupleDesc())) { throw new DbException("Either page is full or tuple desc doesn't match"); } for (int i = 0; i < getNumTuples(); i++) { if (!isSlotUsed(i)) { markSlotUsed(i, true); RecordId rid = new RecordId(pid, i); t.setRecordId(rid); tuples[i] = t; return; } } }
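// A complementary, hedged sketch of tuple removal using the same helpers as
// insertTuple above (isSlotUsed, markSlotUsed); the RecordId accessors getPageId()
// and tupleno() are assumed from the usual SimpleDB API, and the error messages are
// illustrative only.
public void deleteTuple(Tuple t) throws DbException {
  RecordId rid = t.getRecordId();
  if (rid == null || !pid.equals(rid.getPageId())) {
    throw new DbException("tuple is not stored on this page");
  }
  int slot = rid.tupleno();
  if (!isSlotUsed(slot)) {
    throw new DbException("slot is already empty");
  }
  markSlotUsed(slot, false); // clear the header bit
  tuples[slot] = null;
}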
public Tuple getNext() throws NoSuchElementException, TransactionAbortedException { try { Tuple tuple = new Tuple(td); for (int i = 0; i < td.numFields(); i++) { IntField intf = IntField.createIntField(in.readInt()); tuple.setField(i, intf); } return tuple; } catch (EOFException eof) { throw new NoSuchElementException(eof.getMessage()); } catch (Exception e) { e.printStackTrace(); BufferPool.Instance().abortTransaction(tid); closeConnection(); throw new TransactionAbortedException(e); } }
/** * Helper constructor to create a heap page to be used as a buffer. * * @param data - Data to be put in the temporary page * @param td - Tuple description for the corresponding table. * @throws IOException */ private HeapPage(byte[] data, TupleDesc td) throws IOException { this.pid = null; // No pid is needed here, as this page serves as a dummy buffer page and is meant to be // used independently of the buffer manager. this.td = td; this.numSlots = (BufferPool.PAGE_SIZE * 8) / ((td.getSize() * 8) + 1); DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data)); this.header = new Header(dis); try { // allocate and read the actual records of this page tuples = new Tuple[numSlots]; for (int i = 0; i < numSlots; i++) { tuples[i] = readNextTuple(dis, i); } } catch (NoSuchElementException e) { e.printStackTrace(); } dis.close(); }
/** * Create a HeapPage from a set of bytes of data read from disk. The format of a HeapPage is a set * of 32-bit header words indicating the slots of the page that are in use, plus * (BufferPool.PAGE_SIZE/tuple size) tuple slots, where tuple size is the size of tuples in this * database table, which can be determined via {@link Catalog#getTupleDesc}. * * <p>The number of 32-bit header words is equal to: * * <p>(no. tuple slots / 32) + 1 * * <p> * * @see Database#getCatalog * @see Catalog#getTupleDesc * @see BufferPool#PAGE_SIZE */ public HeapPage(HeapPageId id, byte[] data) throws IOException { this.pid = id; this.td = Database.getCatalog().getTupleDesc(id.tableid()); // this.numSlots = (BufferPool.PAGE_SIZE) / (td.getSize()); this.numSlots = (BufferPool.PAGE_SIZE * 8) / ((td.getSize() * 8) + 1); // System.out.println(this.numSlots); DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data)); // allocate and read the header slots of this page header = new Header(dis); try { // allocate and read the actual records of this page tuples = new Tuple[numSlots]; for (int i = 0; i < numSlots; i++) { tuples[i] = readNextTuple(dis, i); } } catch (NoSuchElementException e) { // e.printStackTrace(); } dis.close(); }
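// Worked example of the formulas above, assuming BufferPool.PAGE_SIZE is 4096 bytes
// and a schema of two INT_TYPE fields (8 bytes per tuple):
//   numSlots = (4096 * 8) / (8 * 8 + 1) = 32768 / 65 = 504 slots (floor), and
//   header words = (504 / 32) + 1 = 16 thirty-two-bit words, i.e. 64 bytes of bitmap.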
/** * Retrieve the number of tuple slots on this page. * * @return the number of tuple slots on this page */ private int getNumTuples() { return BufferPool.PAGE_SIZE * 8 / (td.getSize() * 8 + 1); }
/** Ensures that combined's field names = td1's field names + td2's field names */ private boolean combinedStringArrays(TupleDesc td1, TupleDesc td2, TupleDesc combined) { for (int i = 0; i < td1.numFields(); i++) { if (!(((td1.getFieldName(i) == null) && (combined.getFieldName(i) == null)) || td1.getFieldName(i).equals(combined.getFieldName(i)))) { return false; } } for (int i = td1.numFields(); i < td1.numFields() + td2.numFields(); i++) { if (!(((td2.getFieldName(i - td1.numFields()) == null) && (combined.getFieldName(i) == null)) || td2.getFieldName(i - td1.numFields()).equals(combined.getFieldName(i)))) { return false; } } return true; }
/** * Retrieve the number of tuple slots on this page. * * @return the number of tuple slots on this page */ private int getNumTuples() { // some code goes here int tupsPerPage = (BufferPool.PAGE_SIZE * 8) / ((td.getSize() * 8) + 1); // floor division by default return tupsPerPage; }
/** * Create a new tuple with the specified schema (type). * * @param td the schema of this tuple. It must be a valid TupleDesc instance with at least one * field. */ public Tuple(TupleDesc td) { // some code goes here tupleSchema = td; recordId = null; tupleFields = new Field[td.numFields()]; }
/** Unit test for TupleDesc.merge() */ @Test public void combine() { TupleDesc td1, td2, td3; td1 = Utility.getTupleDesc(1, "td1"); td2 = Utility.getTupleDesc(2, "td2"); // test merge(td1, td2) td3 = TupleDesc.merge(td1, td2); assertEquals(3, td3.numFields()); assertEquals(3 * Type.INT_TYPE.getLen(), td3.getSize()); for (int i = 0; i < 3; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i)); assertEquals(combinedStringArrays(td1, td2, td3), true); // test merge(td2, td1) td3 = TupleDesc.merge(td2, td1); assertEquals(3, td3.numFields()); assertEquals(3 * Type.INT_TYPE.getLen(), td3.getSize()); for (int i = 0; i < 3; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i)); assertEquals(combinedStringArrays(td2, td1, td3), true); // test merge(td2, td2) td3 = TupleDesc.merge(td2, td2); assertEquals(4, td3.numFields()); assertEquals(4 * Type.INT_TYPE.getLen(), td3.getSize()); for (int i = 0; i < 4; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i)); assertEquals(combinedStringArrays(td2, td2, td3), true); }
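// A minimal sketch of the merge behavior these assertions exercise, not necessarily
// the project's implementation: the combined descriptor is simply td1's fields
// followed by td2's, built with the TupleDesc(Type[], String[]) constructor used
// elsewhere in these tests.
public static TupleDesc merge(TupleDesc td1, TupleDesc td2) {
  int n1 = td1.numFields(), n2 = td2.numFields();
  Type[] types = new Type[n1 + n2];
  String[] names = new String[n1 + n2];
  for (int i = 0; i < n1; i++) {
    types[i] = td1.getFieldType(i);
    names[i] = td1.getFieldName(i);
  }
  for (int i = 0; i < n2; i++) {
    types[n1 + i] = td2.getFieldType(i);
    names[n1 + i] = td2.getFieldName(i);
  }
  return new TupleDesc(types, names);
}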
@Test public void testEquals() { TupleDesc singleInt = new TupleDesc(new Type[] {Type.INT_TYPE}); TupleDesc singleInt2 = new TupleDesc(new Type[] {Type.INT_TYPE}); TupleDesc intString = new TupleDesc(new Type[] {Type.INT_TYPE, Type.STRING_TYPE}); // .equals() with null should return false assertFalse(singleInt.equals(null)); // .equals() with the wrong type should return false assertFalse(singleInt.equals(new Object())); assertTrue(singleInt.equals(singleInt)); assertTrue(singleInt.equals(singleInt2)); assertTrue(singleInt2.equals(singleInt)); assertTrue(intString.equals(intString)); assertFalse(singleInt.equals(intString)); assertFalse(singleInt2.equals(intString)); assertFalse(intString.equals(singleInt)); assertFalse(intString.equals(singleInt2)); }
private SubTreeDescriptor buildTree( final int queryPlanDepth, final int currentDepth, final DbIterator queryPlan, final int currentStartPosition, final int parentUpperBarStartShift) { if (queryPlan == null) return null; int adjustDepth = currentDepth == 0 ? -1 : 0; SubTreeDescriptor thisNode = new SubTreeDescriptor(null, null); if (queryPlan instanceof SeqScan) { SeqScan s = (SeqScan) queryPlan; String tableName = s.getTableName(); String alias = s.getAlias(); // TupleDesc td = s.getTupleDesc(); if (!tableName.equals(alias)) alias = " " + alias; else alias = ""; thisNode.text = String.format("%1$s(%2$s)", SCAN, tableName + alias); if (SCAN.length() / 2 < parentUpperBarStartShift) { thisNode.upBarPosition = currentStartPosition + parentUpperBarStartShift; thisNode.textStartPosition = thisNode.upBarPosition - SCAN.length() / 2; } else { thisNode.upBarPosition = currentStartPosition + SCAN.length() / 2; thisNode.textStartPosition = currentStartPosition; } thisNode.width = thisNode.textStartPosition - currentStartPosition + thisNode.text.length(); int embedHeight = (queryPlanDepth - currentDepth) / 2 - 1; thisNode.height = currentDepth + 2 * embedHeight; int currentHeight = thisNode.height; SubTreeDescriptor parentNode = thisNode; for (int i = 0; i < embedHeight; i++) { parentNode = new SubTreeDescriptor(parentNode, null); parentNode.text = "|"; parentNode.upBarPosition = thisNode.upBarPosition; parentNode.width = thisNode.width; parentNode.height = currentHeight - 2; parentNode.textStartPosition = thisNode.upBarPosition; currentHeight -= 2; } thisNode = parentNode; } else { Operator plan = (Operator) queryPlan; DbIterator[] children = plan.getChildren(); if (plan instanceof Join) { Join j = (Join) plan; TupleDesc td = j.getTupleDesc(); JoinPredicate jp = j.getJoinPredicate(); String field1 = td.getFieldName(jp.getField1()); String field2 = td.getFieldName(jp.getField2() + children[0].getTupleDesc().numFields()); thisNode.text = String.format( "%1$s(%2$s),card:%3$d", JOIN, field1 + jp.getOperator() + field2, j.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (JOIN.length() / 2 > parentUpperBarStartShift) upBarShift = JOIN.length() / 2; SubTreeDescriptor left = buildTree( queryPlanDepth, currentDepth + adjustDepth + 3, children[0], currentStartPosition, upBarShift); SubTreeDescriptor right = buildTree( queryPlanDepth, currentDepth + adjustDepth + 3, children[1], currentStartPosition + left.width + SPACE.length(), 0); thisNode.upBarPosition = (left.upBarPosition + right.upBarPosition) / 2; thisNode.textStartPosition = thisNode.upBarPosition - JOIN.length() / 2; thisNode.width = Math.max( left.width + right.width + SPACE.length(), thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = left; thisNode.rightChild = right; thisNode.height = currentDepth; } else if (plan instanceof HashEquiJoin) { HashEquiJoin j = (HashEquiJoin) plan; JoinPredicate jp = j.getJoinPredicate(); TupleDesc td = j.getTupleDesc(); String field1 = td.getFieldName(jp.getField1()); String field2 = td.getFieldName(jp.getField2() + children[0].getTupleDesc().numFields()); thisNode.text = String.format( "%1$s(%2$s),card:%3$d", HASH_JOIN, field1 + jp.getOperator() + field2, j.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (HASH_JOIN.length() / 2 > parentUpperBarStartShift) upBarShift = HASH_JOIN.length() / 2; SubTreeDescriptor left = buildTree( queryPlanDepth, currentDepth + 3 + adjustDepth, children[0], currentStartPosition, 
upBarShift); SubTreeDescriptor right = buildTree( queryPlanDepth, currentDepth + 3 + adjustDepth, children[1], currentStartPosition + left.width + SPACE.length(), 0); thisNode.upBarPosition = (left.upBarPosition + right.upBarPosition) / 2; thisNode.textStartPosition = thisNode.upBarPosition - HASH_JOIN.length() / 2; thisNode.width = Math.max( left.width + right.width + SPACE.length(), thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = left; thisNode.rightChild = right; thisNode.height = currentDepth; } else if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; int upBarShift = parentUpperBarStartShift; String alignTxt; TupleDesc td = a.getTupleDesc(); int gfield = a.groupField(); if (gfield == Aggregator.NO_GROUPING) { thisNode.text = String.format( "%1$s(%2$s),card:%3$d", a.aggregateOp(), a.aggregateFieldName(), a.getEstimatedCardinality()); alignTxt = td.getFieldName(00); } else { thisNode.text = String.format( "%1$s(%2$s), %3$s(%4$s),card:%5$d", GROUPBY, a.groupFieldName(), a.aggregateOp(), a.aggregateFieldName(), a.getEstimatedCardinality()); alignTxt = GROUPBY; } if (alignTxt.length() / 2 > parentUpperBarStartShift) upBarShift = alignTxt.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - alignTxt.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; thisNode.height = currentDepth; } else if (plan instanceof Filter) { Filter f = (Filter) plan; Predicate p = f.getPredicate(); thisNode.text = String.format( "%1$s(%2$s),card:%3$d", SELECT, children[0].getTupleDesc().getFieldName(p.getField()) + p.getOp() + p.getOperand(), f.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (SELECT.length() / 2 > parentUpperBarStartShift) upBarShift = SELECT.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - SELECT.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; thisNode.height = currentDepth; } else if (plan instanceof OrderBy) { OrderBy o = (OrderBy) plan; thisNode.text = String.format( "%1$s(%2$s),card:%3$d", ORDERBY, children[0].getTupleDesc().getFieldName(o.getOrderByField()), o.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (ORDERBY.length() / 2 > parentUpperBarStartShift) upBarShift = ORDERBY.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - ORDERBY.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; thisNode.height = currentDepth; } else if (plan instanceof Project) { Project p = (Project) plan; String fields = ""; Iterator<TDItem> it = p.getTupleDesc().iterator(); while (it.hasNext()) fields += it.next().fieldName + ","; fields = fields.substring(0, fields.length() - 1); thisNode.text = 
String.format("%1$s(%2$s),card:%3$d", PROJECT, fields, p.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (PROJECT.length() / 2 > parentUpperBarStartShift) upBarShift = PROJECT.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - PROJECT.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; thisNode.height = currentDepth; } else if (plan.getClass() .getSuperclass() .getSuperclass() .getSimpleName() .equals("Exchange")) { String name = "Exchange"; int card = 0; try { name = (String) plan.getClass().getMethod("getName").invoke(plan); card = (Integer) plan.getClass().getMethod("getEstimatedCardinality").invoke(plan); } catch (Exception e) { e.printStackTrace(); } thisNode.text = String.format("%1$s,card:%2$d", name, card); int upBarShift = parentUpperBarStartShift; if (name.length() / 2 > parentUpperBarStartShift) upBarShift = name.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); if (child == null) { thisNode.upBarPosition = upBarShift; thisNode.textStartPosition = thisNode.upBarPosition - name.length() / 2; thisNode.width = thisNode.textStartPosition + thisNode.text.length() - currentStartPosition; } else { thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - name.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; } thisNode.height = currentDepth; } else if (plan.getClass().getName().equals("simpledb.Rename")) { String newName = null; int fieldIdx = 0; try { newName = (String) plan.getClass().getMethod("newName", (Class<?>[]) null).invoke(plan); fieldIdx = (Integer) plan.getClass().getMethod("renamedField", (Class<?>[]) null).invoke(plan); } catch (Exception e) { e.printStackTrace(); } String oldName = plan.getChildren()[0].getTupleDesc().getFieldName(fieldIdx); thisNode.text = String.format( "%1$s,%2$s->%3$s,card:%4$d", RENAME, oldName, newName, plan.getEstimatedCardinality()); int upBarShift = parentUpperBarStartShift; if (RENAME.length() / 2 > parentUpperBarStartShift) upBarShift = RENAME.length() / 2; SubTreeDescriptor child = buildTree( queryPlanDepth, currentDepth + 2 + adjustDepth, children[0], currentStartPosition, upBarShift); if (child == null) { thisNode.upBarPosition = upBarShift; thisNode.textStartPosition = thisNode.upBarPosition - RENAME.length() / 2; thisNode.width = thisNode.textStartPosition + thisNode.text.length() - currentStartPosition; } else { thisNode.upBarPosition = child.upBarPosition; thisNode.textStartPosition = thisNode.upBarPosition - RENAME.length() / 2; thisNode.width = Math.max( child.width, thisNode.textStartPosition + thisNode.text.length() - currentStartPosition); thisNode.leftChild = child; } thisNode.height = currentDepth; } } return thisNode; }