Before you read: I am using Xcode on a Mac. In this program I am attempting to print the maximum and minimum values of the standard C variable types with every variation of modifiers. An unsigned int uses 4 bytes, so its maximum value should be 4294967295, but the program only displays 2147483647. My goal is to have Xcode print an unsigned int's maximum value, which is 4294967295; am I doing anything incorrectly in the program?
You can focus on the second line of variables declared in main() and the last two printf calls.
#include <stdio.h>
#include <stdbool.h>
#include <stdlib.h>
int main (int argc, const char * argv[])
{
    signed int minSignedInt = {-2147483648}, maxSignedInt = {2147483647};
    unsigned int minUnsignedInt = {0}, maxUnsignedInt = {4294967296};
    printf( "sizeof( bool ) = %d\n", (int)sizeof( bool ) );
    printf( "sizeof( char ) = %d\n", (int)sizeof( char ) );
    printf( "sizeof( short ) = %d\n", (int)sizeof( short ) );
    printf( "sizeof( int ) = %d\n", (int)sizeof( int ) );
    printf( "sizeof( float ) = %d\n", (int)sizeof( float ) );
    printf( "sizeof( long ) = %d\n", (int)sizeof( long ) );
    printf( "sizeof( double ) = %d\n", (int)sizeof( double ) );
    printf( "sizeof( long double ) = %d\n\n", (int)sizeof( long double ) );
    printf( "sizeof( rand() ) is %d\n", (int)sizeof( rand() ) );
    printf( "Max. signed int (or integer) value = %d\n", maxSignedInt );
    printf( "Min. signed int (or integer) value = %d\n\n", minSignedInt );
    printf( "Max. Unsigned int (or integer) value = %d\n", maxUnsignedInt );
    printf( "Min. Unsigned int (or integer) value = %d\n\n", minUnsignedInt );
}
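
For comparison, here is a minimal sketch of what I think the fix might be. It assumes that %u is the correct conversion specifier for unsigned int and that UINT_MAX, INT_MAX, and INT_MIN from <limits.h> are the portable way to get these limits; I would like to confirm whether this is the right approach.

#include <stdio.h>
#include <limits.h>   /* UINT_MAX, INT_MAX, INT_MIN */

int main(void)
{
    /* %u prints an unsigned int; %d would reinterpret the bits as a signed int */
    printf("Max. unsigned int via UINT_MAX = %u\n", UINT_MAX);
    printf("Max. unsigned int as a literal = %u\n", 4294967295U);
    printf("Max. signed int via INT_MAX    = %d\n", INT_MAX);
    printf("Min. signed int via INT_MIN    = %d\n", INT_MIN);
    return 0;
}

I would expect this version to print 4294967295 for the unsigned maximum, but I am not sure whether the problem in my original program is the %d specifier, the {4294967296} initializer, or both.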