Define a getStandardDeviation() function with the specification and prototype shown below:
// Calculate the population standard deviation of the scores
// That means...
// 1) For each score i, add pow(scores[i] - mean,2) to a running sum.
// 2) Divide the sum by the number of scores. That's the variance.
// 3) Return the square root of the variance.
// Assume that numScores > 0
double getStandardDeviation( double scores[], int numScores, double mean) ;
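To check my understanding of the formula before coding it, here is a small example I worked by hand (my own numbers, not part of the assignment): for scores 2, 4, 6 with mean 4, the squared differences are 4, 0, and 4, so the sum is 8, the variance is 8/3 ≈ 2.67, and the standard deviation is sqrt(8/3) ≈ 1.63.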
Here is the driver (main()) used to test my function.
#include <iostream>
#include <iomanip>
#include <cmath>
#include <algorithm> // for std::min
using namespace std;
const int MAX_SCORES = 10; // Maximum number of scores
double getStandardDeviation( double scores[], int numScores, double mean) ;
int main() {
    double scores[MAX_SCORES];
    int scoreCount, skip;
    cin >> scoreCount;
    cin >> skip;
    scoreCount = min(scoreCount, MAX_SCORES);
    int i;
    for (i = 0; i < scoreCount; i++)
        scores[i] = i*skip;
    double mean = (i-1)*skip/2.0; // e.g. mean of 0,1,2,3,4,5 is (6-1)/2.0 = 2.5
    cout << fixed << setprecision(2);
    cout << getStandardDeviation(scores, scoreCount, mean) << endl;
    return 0;
}
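For the test input shown below (2 1), this driver reads scoreCount = 2 and skip = 1, fills the array with scores[0] = 0 and scores[1] = 1, and passes mean = (2-1)*1/2.0 = 0.5 to my function (if I am reading it correctly).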
Here is my solution:
double getStandardDeviation( double scores[], int numScores, double mean)
{
    int sum;
    int variance;
    for(int i = 0; i < numScores; i++)
    {
        sum += pow(scores[i] - mean,2);
    }
    variance = sum / numScores;
    return sqrt(variance);
}
Here is my output:
Input: 2 1
Output: 0.00
Here is the expected output:
Input: 2 1
Output: 0.50
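Working the expected case by hand (assuming I have the formula right): the squared differences are (0 - 0.5)^2 = 0.25 and (1 - 0.5)^2 = 0.25, so the sum is 0.5, the variance is 0.5 / 2 = 0.25, and sqrt(0.25) = 0.50, which matches the expected output.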
Am I doing the calculation correctly? What lines of code should I change?