I have Python code that reads a file into a StringIO. That part works fine, but what I need to do now is an HTTP POST from the StringIO, and I can't figure it out. Any help would be greatly appreciated because this is really confusing and I don't know what to do. The URL is a dummy website, but here is my full code in case someone can help; after the code I've put a rough sketch of what I think the upload part should look like.
import requests, shutil, datetime, glob, os, csv, fnmatch, StringIO

# read each SOR935 export, build Bronto-format CSV rows in a StringIO, then try to upload them
def convertToSor(fileLoc, fileName, mode, listName):
    s = StringIO.StringIO()
    os.chdir(fileLoc)
    for FILE in glob.glob("SOR935*"):
        filepresent = os.path.isfile('/home/hatterx/Desktop/Bronto_Files/FACTS_bronto_import_add_'+dt+'.csv')
        with open(FILE, 'r') as f1: #, open('/home/hatterx/Desktop/Bronto_Files/FACTS_bronto_import_add_'+dt+'.csv', 'ab') as f2, open('/home/hatterx/Desktop/Bronto_Files/FACTS_bronto_import_update_'+dt+'.csv', 'ab') as f3:
            cf1 = csv.DictReader(f1, fieldnames=('CustNo1', 'CustNo2', 'LastOrderDate', 'LastOrderAmount', 'FirstName', 'LastName', 'UserNo', 'EmailAddress', 'Franchise', 'PrevOrderDate', 'PrevOrderAmount', 'State', 'ZIP', 'Amt1', 'Amt2', 'Amt3', 'SalesPerson', 'WEBID'))
            cf2 = csv.DictWriter(s, new_field_names)
            cf3 = csv.DictReader(f1, fieldnames=('CustNo1', 'CustNo2', 'LastOrderDate', 'LastOrderAmount', 'FirstName', 'LastName', 'UserNo', 'EmailAddress', 'Franchise', 'PrevOrderDate', 'PrevOrderAmount', 'State', 'ZIP', 'Amt1', 'Amt2', 'Amt3', 'SalesPerson', 'WEBID'))
            cf4 = csv.DictWriter(s, update_field_names)
            if not filepresent:
                cf2.writeheader()
                cf4.writeheader()
            for row in cf1:
                nr = newrow
                nr['Last Sale Date'] = row['LastOrderDate'].strip()
                nr['Last Sale Amount'] = row['LastOrderAmount'].strip()
                nr['First Name'] = row['FirstName'].strip()
                nr['Last Name'] = row['LastName'].strip()
                nr['Email'] = row['EmailAddress'].strip().split(',', 1)[0]
                if nr['Email'] == '':
                    continue
                fr_name = row['Franchise'].strip()
                if fr_name in franchiseList:
                    nr['Franchise'] = franchiseList[fr_name]['FRANCHISE Name'].strip()
                    if nr['Franchise'] == 'IGNORE':
                        continue
                    nr['osg_web_dir'] = franchiseList[fr_name]['FRANCHISE Name - Directory'].strip()
                    if nr['osg_web_dir'] == '':
                        nr['osg_web_dir'] = 'shop'
                else:
                    nr['Franchise'] = 'SHOP'
                    nr['osg_web_dir'] = 'shop'
                nr['State'] = row['State'].strip()
                nr['Postal/Zip Code'] = row['ZIP'].strip()
                nr['Last Web Order ID'] = row['WEBID'].strip()
                nr['Date Added'] = datetime.date.today().strftime('%m/%d/%Y')
                nr['Email Source'] = 'FACTSauto'
                cf2.writerow(nr)  # write the add row through the DictWriter so it lands in the StringIO as CSV
                ur = updaterow
                ur['Last Sale Date'] = row['LastOrderDate'].strip()
                ur['Last Sale Amount'] = row['LastOrderAmount'].strip()
                ur['First Name'] = row['FirstName'].strip()
                ur['Last Name'] = row['LastName'].strip()
                ur['Email'] = row['EmailAddress'].strip().split(',', 1)[0]
                if ur['Email'] == '':
                    continue
                fr_name = row['Franchise'].strip()
                if fr_name in franchiseList:
                    ur['Franchise'] = franchiseList[fr_name]['FRANCHISE Name'].strip()
                    if ur['Franchise'] == 'IGNORE':
                        continue
                    ur['osg_web_dir'] = franchiseList[fr_name]['FRANCHISE Name - Directory'].strip()
                    if ur['osg_web_dir'] == '':
                        ur['osg_web_dir'] = 'shop'
                else:
                    ur['Franchise'] = 'SHOP'
                    ur['osg_web_dir'] = 'shop'
                ur['State'] = row['State'].strip()
                ur['Postal/Zip Code'] = row['ZIP'].strip()
                ur['Last Web Order ID'] = row['WEBID'].strip()
                cf4.writerow(ur)  # update rows go through the second DictWriter into the same StringIO
        #s.seek(0)
        #s.read()
        print s.getvalue()
        shutil.move(FILE, "/home/hatterx/Desktop/Processed/" + dt + "_" + FILE)
    #return True
    # upload section - this is the part I can't get working with the StringIO
    url = 'http://test.com'  # dummy URL
    files = {'filename': (fileName, open(fileLoc, 'rb'))}#, 'application/octet-stream')}
    data = {'source': 'FACTS Daily Import', 'site_id': 'xxxxx', 'user_id': 'xxxxx', 'key': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'format': 'csv', 'action': mode, 'listname': listName}
    #headers = {'content-type': 'multipart/form-data'}
    res = requests.post(url, files=files, data=data)
    if res.status_code == requests.codes.ok:
        return True
    else:
        print res.status_code
        print res.text
        #print res.raise_for_status()
        return False
franchiseList = {}
with open('/home/hatterx/Desktop/Franchise_Name_Scrub_List.csv', 'r') as ff:
    fcf = csv.DictReader(ff)
    for frow in fcf:
        franchiseList[frow['Misc Franchise Name']] = frow
with open('/home/hatterx/Desktop/Franchise_Name_Scrub_List.csv', 'r') as fF:
    fcf = csv.DictReader(fF)
    for Frow in fcf:
        franchiseList[Frow['FRANCHISE Name - Directory']] = Frow
newrow = {'Last Sale Date': '', 'Last Sale Amount': '', 'First Name': '', 'Last Name': '', 'Email': '', 'Franchise': '', 'State': '', 'Postal/Zip Code': '', 'Last Web Order ID': '', 'Date Added': '', 'Email Source': '', 'osg_web_dir': ''}
updaterow = {'Last Sale Date': '', 'Last Sale Amount': '', 'First Name': '', 'Last Name': '', 'Email': '', 'Franchise': '', 'State': '', 'Postal/Zip Code': '', 'Last Web Order ID': '', 'osg_web_dir': ''}
new_field_names = newrow.keys()
update_field_names = updaterow.keys()
dt = datetime.datetime.now().strftime("%m_%d_%y_%H_%M_%S")
filestart = '/home/hatterx/Desktop/Unprocessed/'
fileprefix = '/home/hatterx/Desktop/Bronto_Files/'
fileprocs = '/home/hatterx/Desktop/Processed/'
filemove = '/home/hatterx/Desktop/Uploaded/'
listTest = 'TEST List For API Integration'
listFACTS = 'FACTS Daily Import'
listOSG = 'OSG Special Offers Send Monthly'
for File in os.listdir(filestart):
    if fnmatch.fnmatch(File, 'SOR935*'):
        cts = convertToSor(filestart)

for File in os.listdir(fileprefix):
    if fnmatch.fnmatch(File, 'FACTS_bronto_import_add_*.csv'):
        res = importToBronto(File, fileprefix+File, 'add', listTest)
        if res:
            shutil.move(fileprefix+File, filemove+dt+"_"+File)
            print 'Successful Add File Upload'
        else:
            print 'Failed to Import Add File'
    elif fnmatch.fnmatch(File, 'FACTS_bronto_import_update_*.csv'):
        res = importToBronto(File, fileprefix+File, 'update', listTest)
        if res:
            shutil.move(fileprefix+File, filemove+dt+"_"+File)
            print 'Successful Update File Upload'
        else:
            print 'Failed to Import Update File'
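What I think I need is to replace the open(fileLoc, 'rb') upload at the end of convertToSor with something that posts the StringIO contents directly. This is only my guess at it (same dummy URL and placeholder credentials as above, and I'm assuming requests will accept either the buffer object or the string from getvalue() in the files tuple), so please correct me if this is wrong:

# my guess at uploading straight from the StringIO instead of reopening a file from disk
s.seek(0)                                    # rewind the buffer before handing it to requests
files = {'filename': (fileName, s)}          # or (fileName, s.getvalue()) to send the raw string
data = {'source': 'FACTS Daily Import', 'site_id': 'xxxxx', 'user_id': 'xxxxx',
        'key': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'format': 'csv',
        'action': mode, 'listname': listName}
res = requests.post('http://test.com', files=files, data=data)   # dummy URL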