Hi, I have written a program in C++ and it feels slow on very large datasets (> 1 GB). I have only just started coding in C++, so I am not confident working with char*. Can anybody help me make it more efficient and suggest, with example snippets, the changes I need to make?
Also, in this code the number of fields in my tab-separated file is fixed in advance: the actual file has 14 fields and I am hard-coding that. How can I make it work if a user gives a file with any number of fields?
Here is my program:
#include <iostream>
#include <fstream>
#include <string>
#include <sstream>
#include <map>

using namespace std;

int main(int argc, char* argv[]) {
    multimap<string, string> mm;
    string str[15];
    string str1;
    string str2;
    string combined;
    ................

    ifstream myfile(argv[1]);

    while (!myfile.eof()) {
        for (int i = 0; i < 15; i++)
            if (i < 14) getline(myfile, str[i], '\t');
            else getline(myfile, str[i]);

        str1.assign(str[1]);
        str2.assign(str[2]);
        str3.assign(str[3]);
        str4.assign(str[4]);
        .............................

        int pos1 = str6.find("words");
        if (pos1 != string::npos)
            str6.replace(pos1, 5, "");
        pos1 = str6.find("words", pos1 + 1);

        combined = str2 + "\t" + str3 + "\t" + str4 + "\t" + str6;

        if (str9.compare(argv[2]) == 0) {
            mm.insert(pair<string, string>(str5, combined));
        }
    }

    for (multimap<string, string>::iterator it = mm.begin(); it != mm.end(); ++it)
    {
        cout << (*it).second << "\t" << (*it).first << endl;
    }

    mm.clear();
    return 0;
}
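For the variable number of fields, would something along these lines be the right direction? This is just a rough sketch I have not tested on the 1 GB files; split_fields is a helper I made up for this post, and the column numbers f[2]..f[9] are only my guess at matching str2..str9 from the program above.

#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <map>
#include <utility>

using namespace std;

// Split one tab-separated line into however many fields it contains,
// so the number of columns is no longer hard-coded.
vector<string> split_fields(const string& line) {
    vector<string> fields;
    string field;
    istringstream ss(line);
    while (getline(ss, field, '\t'))
        fields.push_back(field);
    return fields;
}

int main(int argc, char* argv[]) {
    if (argc < 3) {
        cerr << "usage: " << argv[0] << " <file> <match-value>" << endl;
        return 1;
    }

    ifstream myfile(argv[1]);
    multimap<string, string> mm;
    string line;

    // Reading line by line ends the loop cleanly at end of file,
    // instead of testing myfile.eof() before the reads.
    while (getline(myfile, line)) {
        vector<string> f = split_fields(line);
        if (f.size() < 10)   // not enough columns for the ones used below
            continue;

        // Same "words" removal my program does on str6.
        string col6 = f[6];
        string::size_type pos = col6.find("words");
        if (pos != string::npos)
            col6.erase(pos, 5);

        // f[2], f[3], ... stand in for my str2, str3, ... above.
        if (f[9] == argv[2]) {
            string combined = f[2] + "\t" + f[3] + "\t" + f[4] + "\t" + col6;
            mm.insert(make_pair(f[5], combined));
        }
    }

    for (multimap<string, string>::iterator it = mm.begin(); it != mm.end(); ++it)
        cout << it->second << "\t" << it->first << "\n";

    return 0;
}

My thinking is that reading whole lines with getline and splitting them into a vector avoids hard-coding the 14/15 fields and also gets rid of the eof() check, but I don't know whether building a vector<string> for every line is fast enough for files over 1 GB.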
Thanks