// Demonstrates bitwise AND / OR / XOR on the nibbles 0xe (1110) and 0x3 (0011).
//
// Original question (kept verbatim): "When using cout, it displays the
// translation (2, f, and d) vs the actual value (0010, 1111, and 1101).
// How do I make it so that it display this vs what the bit goes with?"
//
// Answer: `std::hex` prints hexadecimal digits, and iostreams have no
// binary base manipulator at all. To print the bit pattern, wrap the
// value in std::bitset<N>; its operator<< prints exactly N binary digits
// (leading zeros included), which is what the comments below expect.
#include <bitset>
#include <iostream>

int main() {
    std::cout << std::bitset<4>(0xe & 0x3) << '\n';  // 1110 & 0011 -> 0010 (AND)
    std::cout << std::bitset<4>(0xe | 0x3) << '\n';  // 1110 | 0011 -> 1111 (OR)
    std::cout << std::bitset<4>(0xe ^ 0x3) << '\n';  // 1110 ^ 0011 -> 1101 (XOR)
    return 0;
}