#include<iostream>
using namespace std;
enum Leve{Freshman,Sophomore,Junior,Senior};
enum Grade{A,B,C,D};
class student {
public:
student(unsigned number, Leve leve, Grade grade):number(number),leve(leve),grade(grade){};
void show();
private:
unsigned number : 27;
Leve leve:2;
Grade grade:2;
};
void student::show() {
cout << "number:" << number <<"\tLeve:";
switch (leve) {
case Freshman:cout << "Freshman";
break;
case Sophomore:cout << "Sophomore";
break;
case 2:cout << "Junior" ;
break;
case 3:cout << "Senior";
}
cout << "\tGrade:";
switch (grade) {
case 0:cout << "A";
break;
case 1:cout << "B";
break;
case 2:cout << "C";
break;
case 3:cout << "D";
}
cout << endl;
}
int main() {
student a(4536978, Sophomore, A);
student b(54324783, Freshman, C);
student c(58953611, Junior, D);
cout << "The size of student is " << sizeof(a)<<endl;
a.show();
b.show();
c.show();
return 0;
}
Why can't my code correctly output 'C' information in the VS Code environment, but it works with GCC? It might be due to issues related to bitfields. Having done some research, it might be related to the bitfield padding mechanism, but I don't understand why.
By default the type of an
enumin MSVC isintGCC seems to default to an unsigned type (the default underlying type for an enum isn't set by the standard). As you've only assigned 2 bits to each field the range of storable values is-2to1. If you explicitly set the underlying type of the enum to an unsigned type then 2 bits will be sufficient to store your 4 values.