Hii everyone:)
I have written this code to do conversion between decimal to any base between 2 and 26....
But this is not working properly
ca any one help me:confused

#include<iostream.h>
#include<conio.h>

void conversion(int no,int base)
{
	int arr[10]={0};
	int i=0;
	while(no>0)
	{
		arr[i]=no%base;
		i++;
		no=no/base;

	}
	for(int j=i;j>=0;j--)
		cout<<arr[j];
}
void conversionGreater(int no,int base)
{
	int arr[20]={0};
	int i=0,j=0;
	char a='A';

	while(no>0)
	{
		arr[i]=no%base;
		i++;
		no=no/base;

		if(no>=10)
		{
			j=no%base;

			for(int k=0;k<j;k++)
				arr[i]=a+k;
			i++;
			no=no/base;
		}

	}
	for(int l=i;l>=0;l--)
		cout<<arr[l];
}
void main()
{
	clrscr();
	int base=16;
	int no=2600;
	if(base >=2 && base<=9)
		conversion(no,base);
	else if(base >=11 && base<=26)
		conversionGreater(no,base);
	getch();
}

I need help urgently......
thanx in advance

if(no>=10)
		{
			j=no%base;

			for(int k=0;k<j;k++)
				arr[i]=a+k;
			i++;
			no=no/base;
		}

I would like to know what are you trying to do here. FIrstly you are converting the alphabet character 'A' into an int. ANd then secondly after the for loop is done it would just add (a+j) in the end.

Secondly could you post an example of the input and the required output.

Be a part of the DaniWeb community

We're a friendly, industry-focused community of developers, IT pros, digital marketers, and technology enthusiasts meeting, networking, learning, and sharing knowledge.