/* Demonstrates printing an unsigned char in binary and the effect of left
 * shifts. showbits() tests each bit of its argument (MSB to LSB) by ANDing
 * with a single-bit mask and prints '1' or '0' accordingly. */
#include <stdio.h>
void showbits (unsigned char);
int main(void)
{
    unsigned char num = 225, j, k;

    /* Show the starting value in binary first. */
    printf("\nDecimal %d is same as binary ", num);
    showbits(num);

    /* Shift left by 0..4 bits and show each result.
     * num << j promotes to int; assigning to the unsigned char k
     * truncates the result back to the low 8 bits. */
    for (j = 0; j <= 4; j++)
    {
        k = num << j;
        printf("\n%d left shift %d gives ", num, j);
        showbits(k);
    }
    return 0;
}
/* Print the 8-bit binary representation of n, most significant bit first. */
void showbits(unsigned char n)
{
    int i;
    unsigned char j, k, andmask;

    /* Walk the bits from bit 7 (MSB) down to bit 0 (LSB). */
    for (i = 7; i >= 0; i--)
    {
        j = i;
        andmask = 1 << j;   /* mask with only bit j set, e.g. j=7 -> 10000000 */
        k = n & andmask;    /* isolate bit j of n: nonzero iff that bit is 1 */
        k == 0 ? printf("0") : printf("1");
    }
}
surfingturtle 0 Light Poster
deceptikon 1,790 Code Sniper Team Colleague Featured Poster
surfingturtle 0 Light Poster
ddanbe 2,724 Professional Procrastinator Featured Poster
surfingturtle 0 Light Poster
ddanbe 2,724 Professional Procrastinator Featured Poster
vegaseat 1,735 DaniWeb's Hypocrite Team Colleague
surfingturtle 0 Light Poster
ddanbe 2,724 Professional Procrastinator Featured Poster
surfingturtle 0 Light Poster
Be a part of the DaniWeb community
We're a friendly, industry-focused community of developers, IT pros, digital marketers, and technology enthusiasts meeting, networking, learning, and sharing knowledge.