/** Unit test for TupleDesc.merge() */
@Test
public void combine() {
    TupleDesc td1, td2, td3;

    td1 = Utility.getTupleDesc(1, "td1");
    td2 = Utility.getTupleDesc(2, "td2");

    // test merge(td1, td2)
    td3 = TupleDesc.merge(td1, td2);
    assertEquals(3, td3.numFields());
    assertEquals(3 * Type.INT_TYPE.getLen(), td3.getSize());
    for (int i = 0; i < 3; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i));
    assertTrue(combinedStringArrays(td1, td2, td3));

    // test merge(td2, td1)
    td3 = TupleDesc.merge(td2, td1);
    assertEquals(3, td3.numFields());
    assertEquals(3 * Type.INT_TYPE.getLen(), td3.getSize());
    for (int i = 0; i < 3; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i));
    assertTrue(combinedStringArrays(td2, td1, td3));

    // test merge(td2, td2)
    td3 = TupleDesc.merge(td2, td2);
    assertEquals(4, td3.numFields());
    assertEquals(4 * Type.INT_TYPE.getLen(), td3.getSize());
    for (int i = 0; i < 4; ++i) assertEquals(Type.INT_TYPE, td3.getFieldType(i));
    assertTrue(combinedStringArrays(td2, td2, td3));
}
@Test
public void new_getFieldTypeTest() {
    int length = 100;
    String name = "td";
    TupleDesc td = Utility.getTupleDesc(length, name);

    // Index above the upper bound.
    try {
        td.getFieldType(length + 1);
        fail("expected exception");
    } catch (NoSuchElementException e) {
    }

    // Index below the lower bound.
    try {
        td.getFieldType(-1);
        fail("expected exception");
    } catch (NoSuchElementException e) {
    }

    // Check the type of each field.
    for (int i = 0; i < length; i++) {
        assertEquals(Type.INT_TYPE, td.getFieldType(i));
    }
}
/**
 * Create a new TableStats object that keeps track of statistics on each column of a table.
 *
 * @param tableid The table over which to compute statistics
 * @param ioCostPerPage The cost per page of IO. This doesn't differentiate between
 *     sequential-scan IO and disk seeks.
 */
public TableStats(int tableid, int ioCostPerPage) {
    // Fetch the DbFile for the table in question, then scan through its
    // tuples and compute the values needed to build the per-column histograms.
    this.ioCostPerPage = ioCostPerPage;
    DbFile file = Database.getCatalog().getDbFile(tableid);
    tupleDesc = file.getTupleDesc();
    numPages = ((HeapFile) file).numPages();
    numTuples = 0;

    int numFields = tupleDesc.numFields();

    // One statistics object per column: integer statistics for INT fields,
    // a string histogram for STRING fields.
    statistics = new ArrayList<Object>();
    for (int i = 0; i < numFields; i++) {
        if (Type.INT_TYPE.equals(tupleDesc.getFieldType(i))) {
            statistics.add(new IntStatistics(NUM_HIST_BINS));
        } else {
            statistics.add(new StringHistogram(NUM_HIST_BINS));
        }
    }

    // Scan every tuple once, counting tuples and feeding each field value
    // into the statistics object for its column.
    final DbFileIterator iter = file.iterator(null);
    try {
        iter.open();
        while (iter.hasNext()) {
            Tuple t = iter.next();
            numTuples++;
            for (int i = 0; i < numFields; i++) {
                if (Type.INT_TYPE.equals(tupleDesc.getFieldType(i))) {
                    ((IntStatistics) statistics.get(i))
                            .addValue(((IntField) t.getField(i)).getValue());
                } else {
                    ((StringHistogram) statistics.get(i))
                            .addValue(((StringField) t.getField(i)).getValue());
                }
            }
        }
        iter.close();
    } catch (DbException | TransactionAbortedException e) {
        e.printStackTrace();
    }
}
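/*
 * The constructor above assumes an IntStatistics helper with a (bins) constructor,
 * addValue(int), and estimateSelectivity(Predicate.Op, int). That class is not shown
 * here; the sketch below is one plausible minimal shape (same package as Predicate),
 * estimating selectivity from a running min/max/count under a uniform-distribution
 * assumption rather than real histogram buckets. It is illustrative only, not this
 * codebase's actual implementation.
 */
public class IntStatistics {
    private int count = 0;
    private int min = Integer.MAX_VALUE;
    private int max = Integer.MIN_VALUE;

    public IntStatistics(int bins) {
        // bins is unused in this simplified sketch
    }

    /** Record one value from the column being summarized. */
    public void addValue(int v) {
        count++;
        min = Math.min(min, v);
        max = Math.max(max, v);
    }

    /** Estimate the fraction of recorded values satisfying (value op constant). */
    public double estimateSelectivity(Predicate.Op op, int constant) {
        if (count == 0) return 0.0;
        double range = Math.max(1.0, (double) max - min + 1);
        switch (op) {
            case EQUALS:
                return (constant < min || constant > max) ? 0.0 : 1.0 / range;
            case NOT_EQUALS:
                return 1.0 - ((constant < min || constant > max) ? 0.0 : 1.0 / range);
            case GREATER_THAN:
                if (constant >= max) return 0.0;
                if (constant < min) return 1.0;
                return (max - constant) / range;
            case LESS_THAN:
                if (constant <= min) return 0.0;
                if (constant > max) return 1.0;
                return (constant - min) / range;
            case GREATER_THAN_OR_EQ:
                if (constant > max) return 0.0;
                if (constant <= min) return 1.0;
                return (max - constant + 1) / range;
            case LESS_THAN_OR_EQ:
                if (constant < min) return 0.0;
                if (constant >= max) return 1.0;
                return (constant - min + 1) / range;
            default:
                return 1.0;
        }
    }
}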
/** Read the tuple stored in the given slot from the source file. */
private Tuple readNextTuple(DataInputStream dis, int slotId) throws NoSuchElementException {
    // If the associated header bit is not set, skip over the slot's bytes
    // so the stream stays aligned, and return null.
    if (!isSlotUsed(slotId)) {
        for (int i = 0; i < td.getSize(); i++) {
            try {
                dis.readByte();
            } catch (IOException e) {
                throw new NoSuchElementException("error reading empty tuple");
            }
        }
        return null;
    }

    // Read the fields in the tuple.
    Tuple t = new Tuple(td);
    RecordId rid = new RecordId(pid, slotId);
    t.setRecordId(rid);
    try {
        for (int j = 0; j < td.numFields(); j++) {
            Field f = td.getFieldType(j).parse(dis);
            t.setField(j, f);
        }
    } catch (java.text.ParseException e) {
        e.printStackTrace();
        throw new NoSuchElementException("parsing error!");
    }

    return t;
}
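/*
 * readNextTuple relies on isSlotUsed(int) to consult the page's slot bitmap. That
 * helper is not shown above; assuming the conventional layout where a byte[] header
 * stores one bit per slot (least-significant bit first within each byte), it could
 * look like the sketch below. The field name `header` is an assumption.
 */
private boolean isSlotUsed(int slotId) {
    int byteIndex = slotId / 8;   // which header byte holds this slot's bit
    int bitIndex = slotId % 8;    // position of the bit within that byte
    return ((header[byteIndex] >> bitIndex) & 1) == 1;
}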
/** Unit test for TupleDesc.getFieldType() */
@Test
public void getType() {
    int[] lengths = new int[] {1, 2, 1000};

    for (int len : lengths) {
        TupleDesc td = Utility.getTupleDesc(len);
        for (int i = 0; i < len; ++i) assertEquals(Type.INT_TYPE, td.getFieldType(i));
    }
}
/**
 * Estimate the selectivity of predicate <tt>field op constant</tt> on the table.
 *
 * @param field The field over which the predicate ranges
 * @param op The logical operation in the predicate
 * @param constant The value against which the field is compared
 * @return The estimated selectivity (fraction of tuples that satisfy the predicate)
 */
public double estimateSelectivity(int field, Predicate.Op op, Field constant) {
    // Dispatch to the per-column statistics object built in the constructor.
    if (Type.INT_TYPE.equals(tupleDesc.getFieldType(field))) {
        return ((IntStatistics) statistics.get(field))
                .estimateSelectivity(op, ((IntField) constant).getValue());
    } else {
        return ((StringHistogram) statistics.get(field))
                .estimateSelectivity(op, ((StringField) constant).getValue());
    }
}
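/*
 * A typical caller multiplies this selectivity by the table's tuple count to
 * estimate a filter's output cardinality. The fragment below assumes accessors
 * in the usual SimpleDB style (TableStats.getTableStats(String), totalTuples());
 * the exact names and the column index are placeholders and may differ here.
 */
int colIndex = 0;                                    // hypothetical: position of the filtered column
TableStats stats = TableStats.getTableStats("t");    // assumed lookup by table name
double sel = stats.estimateSelectivity(colIndex, Predicate.Op.GREATER_THAN, new IntField(42));
double estimatedCard = sel * stats.totalTuples();    // totalTuples() is assumed to exist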
/**
 * Returns the TupleDesc with field names from the underlying HeapFile, prefixed with the
 * tableAlias string from the constructor. This prefix becomes useful when joining tables
 * that contain fields with the same name.
 *
 * @return the TupleDesc with field names from the underlying HeapFile, prefixed with the
 *     tableAlias string from the constructor.
 */
public TupleDesc getTupleDesc() {
    TupleDesc tup = Database.getCatalog().getTupleDesc(id);
    int length = tup.numFields();
    String[] field = new String[length];
    Type[] types = new Type[length];

    // Copy the types and prefix every field name with the table alias.
    for (int i = 0; i < length; i++) {
        types[i] = tup.getFieldType(i);
        field[i] = alias + "." + tup.getFieldName(i);
    }
    return new TupleDesc(types, field);
}
private void createAliasedTd() {
    Catalog gc = Database.getCatalog();
    TupleDesc old_td = gc.getTupleDesc(tableid);
    String[] newFieldAr = new String[old_td.numFields()];
    Type[] typeAr = new Type[old_td.numFields()];

    // Substitute the literal string "null" when the alias or a field name is
    // missing, so the prefixed name is always well-formed.
    if (alias == null) {
        alias = "null";
    }
    for (int i = 0; i < newFieldAr.length; i++) {
        String field = old_td.getFieldName(i);
        if (field == null) {
            field = "null";
        }
        newFieldAr[i] = alias + "." + field;
        typeAr[i] = old_td.getFieldType(i);
    }
    td = new TupleDesc(typeAr, newFieldAr);
}
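/*
 * With the aliased TupleDesc in place, a prefixed field reference such as "t1.id"
 * can be resolved by name after the scan is constructed. The fragment below assumes
 * the usual SimpleDB-style SeqScan constructor and a fieldNameToIndex(String) lookup
 * on TupleDesc; the exact names in this codebase, and the "id" column, are assumptions.
 */
SeqScan scan = new SeqScan(tid, tableid, "t1");   // tid and tableid assumed to be in scope
TupleDesc aliased = scan.getTupleDesc();
int idx = aliased.fieldNameToIndex("t1.id");      // prefixed name stays unambiguous after a join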