/**
 * Construct the Huffman coding tree from the character counts.
 *
 * <p>Bug fix: the original implementation merged the LAST two nodes of the working list on every
 * iteration, ignoring node weights entirely. Huffman's algorithm must always merge the two
 * LOWEST-weight nodes; otherwise the resulting prefix code is valid but not optimal, producing
 * worse compression than claimed. This version scans for and removes the two minimum-weight nodes
 * before each merge. NOTE(review): files compressed by the old (buggy) tree will not decode
 * correctly under the fixed tree — acceptable only if no such files must remain readable.
 */
private void createTree() {
  ArrayList<HuffNode> ar = new ArrayList<HuffNode>();
  // One leaf per character that actually occurs; remember it in theNodes for later lookup.
  for (int i = 0; i <= BitUtils.DIFF_BYTES; i++) {
    if (theCounts.getCount(i) > 0) {
      HuffNode newNode = new HuffNode(i, theCounts.getCount(i), null, null, null);
      theNodes[i] = newNode;
      ar.add(newNode);
    }
  }
  // The end-of-file marker always gets weight 1.
  theNodes[END] = new HuffNode(END, 1, null, null, null);
  ar.add(theNodes[END]);
  // Repeatedly merge the two lowest-weight nodes until a single tree remains.
  while (ar.size() > 1) {
    HuffNode left = removeMinWeight(ar);
    HuffNode right = removeMinWeight(ar);
    HuffNode node = new HuffNode(INCOMPLETE_CODE, left.weight + right.weight, left, right, null);
    left.parent = right.parent = node;
    ar.add(node);
  }
  root = ar.remove(0);
}

/**
 * Removes and returns the minimum-weight node from the working list.
 *
 * <p>Linear scan is O(n) per call; fine for an alphabet of at most DIFF_BYTES + 1 symbols. A
 * PriorityQueue would be asymptotically better but requires assuming the exact type of {@code
 * weight} for a comparator.
 */
private HuffNode removeMinWeight(ArrayList<HuffNode> ar) {
  int minIdx = 0;
  for (int i = 1; i < ar.size(); i++) {
    if (ar.get(i).weight < ar.get(minIdx).weight) {
      minIdx = i;
    }
  }
  return ar.remove(minIdx);
}
/** * Writes an encoding table to an output stream. Format is character (1 byte), count (4 bytes). A * zero count terminates the encoding table. */ public void writeEncodingTable(DataOutputStream out) throws IOException { for (int i = 0; i <= BitUtils.DIFF_BYTES; i++) { if (theCounts.getCount(i) > 0) { // Voor files met alleen a t/m e worden hier de ASCII waarden // 97 t/m 101 geschreven. Als je de file opent dan kun je de // characters lezen. out.writeByte(i); // De tellingen worden als bits geschreven (zie API DataOutputStream) dus niet // 'leesbaar' als je de file opent. out.writeInt(theCounts.getCount(i)); } } out.writeByte( 0); // write a 0 byte followed by a 0 int to signify the end of the encoding table in the // file. out.writeInt(0); }
/** * Read the encoding table (the char counts) from an input stream in format given above and then * construct the Huffman tree. Stream will then be positioned to read compressed data. */ public void readEncodingTable(DataInputStream in) throws IOException { for (int i = 0; i <= BitUtils.DIFF_BYTES; i++) // set all counts to 0 theCounts.setCount(i, 0); int ch; int num; while (true) { // loop forever, stops if at any points we read a 0 (which signifies the end of // the encoding table). ch = in.readByte() & 0xff; // to properly encode characters with a byte value of more than 128. num = in.readInt(); if (num == 0) break; theCounts.setCount(ch, num); } createTree(); }
/**
 * Create a new TreeMaker object: counts the characters in the stream, builds a Huffman tree from
 * those counts via a min-heap, collects the tree's leaf nodes, and assigns a code to each.
 *
 * @param stream input stream
 * @throws IOException sth wrong with IO
 * @throws Exception sth wrong otherwise
 */
public TreeMaker(BitInputStream stream) throws IOException, Exception {
  // Using CharCounter to get necessary values
  cc = new CharCounter();
  streamSize = cc.countAll(stream); // total characters read from the stream
  size = cc.getNonZeroCharCount(); // number of distinct characters that occur
  // Creating HuffTree and HuffBaseNode arrays to store things.
  // One extra slot (size + 1) is reserved for the pseudo-EOF entry below.
  HuffArr = new HuffTree[size + 1];
  newHuffArr = new HuffBaseNode[size + 1];
  int j = 0;
  // Creating individual HuffTree elements: one single-node tree per
  // character with a non-zero count, packed into HuffArr[0..size-1].
  for (int i = 0; i < cc.getSize(); i++) {
    if (cc.getCount(i) != 0) {
      HuffArr[j] = new HuffTree(i, cc.getCount(i));
      // System.out.println((char)i + ":" + cc.getCount(i));
      j++;
    }
  }
  // Last HuffArr element is the Pseudo_eof, always with weight 1
  HuffArr[size] = new HuffTree(IHuffConstants.PSEUDO_EOF, 1);
  // Creating a minheap out of the HuffArr array; capacity 2*(size+1) leaves
  // room for the internal nodes created while merging — TODO confirm against MinHeap
  Heap = new MinHeap(HuffArr, size + 1, 2 * (size + 1));
  // Get root from helper method (repeatedly merges the heap's minimum trees)
  root = buildTree();
  // Initializing index as 0; presumably the write cursor realTree() uses
  // while filling newHuffArr — verify against realTree()
  index = 0;
  // Go from root to get all of the leaf nodes
  // Put the leaf nodes to newHuffArr
  realTree(root.root());
  // Set codings for nodes
  codings(root.root());
}
/**
 * Get the size of the encoded (compressed) part: for each regular character, its code length
 * multiplied by its occurrence count; the pseudo-EOF code is counted exactly once.
 *
 * @return total encoded size, or the partial sum accumulated before an error if one of the array
 *     entries could not be processed
 */
private int getEncodedSize() {
  int ges = 0;
  try {
    // newHuffArr holds size + 1 leaves: the occurring characters plus pseudo-EOF.
    for (int i = 0; i < size + 1; i++) {
      int val = ((HuffLeafNode) newHuffArr[i]).element();
      if (val != PSEUDO_EOF) {
        // Regular character: code length times number of occurrences.
        ges += newHuffArr[i].getCode().length() * cc.getCount(val);
      } else {
        // Pseudo-EOF appears exactly once in the encoded output.
        ges += newHuffArr[i].getCode().length();
      }
    }
  } catch (Exception e) {
    // Defensive catch (e.g. a non-leaf node in newHuffArr would raise
    // ClassCastException). The original message ran two unrelated sentences
    // together with no separator and asserted a cause it could not know;
    // report the actual exception instead.
    System.out.println("Something went wrong while computing the encoded size: " + e);
    e.printStackTrace();
  }
  return ges;
}