This code calculates the variance as 9.75, but the correct value is 9.3. What am I doing wrong?
static double computeVariance(ref double Variance, params int[] intValues)
{
    int sum = 0;
    for (int n = 0; n < intValues.Length; n++)
    {
        sum += intValues[n];
    }
    double avg = sum / intValues.Length;
    double varSum = 0;
    for (int v = 0; v < intValues.Length; v++)
    {
        double diff = ((double)intValues[v] - avg);
        varSum += (diff * diff);
    }
    Variance = varSum / (intValues.Length - 1);
    return Variance;
}
static void Main(string[] args)
{
    double myInts = 0;
    double Variance = computeVariance(ref myInts, 1, 5, 2, 7, 8);
    Console.WriteLine("The variance is " + Variance);
    Console.ReadKey();
}
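
Working it out by hand for 1, 5, 2, 7, 8: the mean is 23 / 5 = 4.6, the squared deviations sum to 12.96 + 0.16 + 6.76 + 5.76 + 11.56 = 37.2, and 37.2 / (5 - 1) = 9.3. A small standalone check (just a sketch, assuming the sample variance with an n - 1 denominator) arrives at that 9.3:

using System;

class VarianceCheck
{
    static void Main()
    {
        int[] values = { 1, 5, 2, 7, 8 };

        // Mean kept in double arithmetic: 23 / 5 = 4.6
        double mean = 0;
        foreach (int v in values)
        {
            mean += v;
        }
        mean /= values.Length;

        // Sum of squared deviations from the mean: 37.2
        double squaredDiffs = 0;
        foreach (int v in values)
        {
            squaredDiffs += (v - mean) * (v - mean);
        }

        // Sample variance with an (n - 1) denominator: 37.2 / 4 = 9.3
        Console.WriteLine(squaredDiffs / (values.Length - 1));
    }
}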