// Extraction residue from the hosting site preserved below as a comment:
// "Documenti di Didattica / Documenti di Professioni / Documenti di Cultura"
import java.io.*;
import java.util.*;
public class HuffMain {
public static PriorityQueue<Node> q;
public static HashMap<Character, String> charToCode;
public static HashMap<String, Character> codeToChar;
@SuppressWarnings("resource")
public static void main(String[] args) throws FileNotFoundException {
// Read all the contents of the file
String text = new Scanner(new File("../text.txt")).useDelimiter("\\A").n
ext();
// Count the frequency of all the characters
HashMap<Character, Integer> dict = new HashMap<Character, Integer>();
for(int i = 0; i < text.length(); i++) {
char a = text.charAt(i);
if(dict.containsKey(a))
dict.put(a, dict.get(a)+1);
else
dict.put(a, 1);
}
// Create a forest (group of trees) by adding all the nodes to a priorit
y queue
q = new PriorityQueue<Node>(100, new FrequencyComparator());
int n = 0;
for(Character c : dict.keySet()) {
q.add(new Node(c, dict.get(c)));
n++;
}
Node root = huffmain(n);
buildTable(root);
String compressed = compress(text);
saveToFile(compressed);
String decompressed = decompress(compressed);
writeToFile(decompressed);
}
// This method builds the tree based on the frequency of every characters
public static Node huffmain(int n) {
Node x, y;
for(int i = 1; i <= n-1; i++) {
Node z = new Node();
z.left = x = q.poll();
z.right = y = q.poll();
z.freq = x.freq + y.freq;
q.add(z);
}
return q.poll();
}
// This method builds the table for the compression and decompression
public static void buildTable(Node root) {
charToCode = new HashMap<Character, String>();
codeToChar = new HashMap<String, Character>();
postorder(root, new String());
}
class Node
{
char alpha;
int freq;