Hi team. I have the code below working 100% on my local machine. It makes the required updates and I am happy with it. But when I try to run it from the server, it gives the attached error message.
Please ignore all the other lines (I have removed them for this question) and have a look at lines 37 to 45, then line 218.

I just put this together, guys. I am really fairly new to Python.

import os
import sys
import datetime
import time
import logging
import platform

import urllib
import urllib2
import httplib
import httplib2
import webbrowser
import traceback

# used further down but missing from the paste above
import string
import zipfile
import pymssql
import paramiko


sys.path.append('****')
import SFTPSync
if platform.system().upper() == 'WINDOWS':
    DBHOST='ReportingServicesServer'
else:
    DBHOST='ReportingServicesServer:1433'
DBUSER='tibco'
DBPW='tibco529t'

globalPrimeServer = 'xxx'

username = 'xx'
password = './globalprimepython.ppk'  # actually the path to a private key file, used by paramiko below

srcBaseFolder = 'C:/ClientReports/PRimeDB/DailyExtracts'
dstTempFolder = 'C:/ClientReports/Temp/globalprime/%s'
dstBaseFolder = '/export/data/gpfs/reports/SAReport/'


FOLDERS = (  # tuples of (srcFolder, dstFilename, format1, format2, format3); other entries removed for this question
    ('TrialBalance', 'TrialBalance', 'pdf', 'xls', 'csv'),
    ('TrialBalanceSummary', 'TrialBalanceSummary', 'pdf', 'xls', 'csv'))

def updateprimepage():
    try:
        import urllib
        cobdate = string.replace(sys.argv[1], '-', '')
        f = urllib.urlopen('http://xxx.xx.xx.xxx:18080/optionupload/OptionReportOndemandServlet?dirpath=%%2Fexport%%2Fdata%%2Fgpfs%%2Freports%%2FSAReport&cobdate=%s&submit=submit&operation=submit' %cobdate)
        print f.read()
    
    except IOError:
        print 'Cannot open URL %s for reading' % f
        
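# look up the previous business day through a SQL scalar function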
def getpreviousbusinessday(date):

    con = pymssql.connect(host=DBHOST, user=DBUSER, password=DBPW, database='xxx')

    result = True
    try:
        cur = con.cursor()

        query = "select dbo.fn_PreviousBusinessDay('%s', 121, 10)" % date
        cur.execute(query)

        result = cur.fetchone()[0]

    finally:
        con.close()  # note the parentheses: con.close on its own does nothing

    return result

STATUS_WAITING = 0
STATUS_RUNNING = 1
STATUS_COMPLETE = 2
STATUS_FAILED = 3

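# builds a zip archive locally, then (over SSH) unzips and removes it on the remote host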
class ZipUtilities:
    zip_file = None
    zip_filename = None
    def createZip(self, filename):
        self.zip_file = zipfile.ZipFile(filename, 'w',compression=zipfile.ZIP_DEFLATED)
        self.zip_filename = filename
    def getFilename(self):
        return self.zip_filename
            
    def toZip(self, file, arcname):
        self.zip_file.write(file, arcname=arcname)
        
    def unZipRemotely(self, host, keyfile, filename, dstDirectory):
        client = paramiko.SSHClient()
        try:
            key = paramiko.RSAKey.from_private_key_file(keyfile)
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            client.connect(hostname=host, username='xx', pkey=key, look_for_keys=False, allow_agent=False)
            stdin, stdout, stderr = client.exec_command('cd %s; unzip -o %s > /dev/null; rm %s' % (dstDirectory, filename, filename))
            stdout.channel.recv_exit_status()  # wait for the remote unzip to finish before closing the connection
        finally:
            client.close()
            
    def close(self):
        self.zip_file.close()
        
            
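# mark the queue row in RSL_Queue with one of the STATUS_* values above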
def updatestatus(key,status):

    con = pymssql.connect(host=DBHOST,user=DBUSER,password=DBPW,database='xxx')
    
    try:
        cur = con.cursor()    
        query = "UPDATE [RSL_Queue] SET [UploadStatus] = %(UploadStatus)s WHERE [key] = %(key)s" % {'UploadStatus': status, 'key': key}
        cur.execute(query)
        con.commit()
    finally:
        con.close()
        
                
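# build a dict mapping GenevaID -> CPortID from the FundMap table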
def getCportIDs():

    con = pymssql.connect(host=DBHOST, user=DBUSER, password=DBPW, database='xxx')
    result = dict()
    try:
        print 'getting cportids from FundMap table in xxx'

        cur = con.cursor()
        query = "SELECT GenevaID, CPortID FROM FundMap"
        cur.execute(query)
        row = cur.fetchone()
        while row:
            result.update({row[0]: row[1]})
            row = cur.fetchone()

    finally:
        con.close()

    return result

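# main: zip up the matching reports for the extract date, upload the zip, unzip it remotely, and update the queue status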
result = 0
try:
    
    extractDate = string.replace(sys.argv[1], '-', '')
    queuePortfolio = sys.argv[2]
    queueID = sys.argv[3]
    
    cPortIDs = getCportIDs()    
    sftp = SFTPSync.sftpsync(username=username, password=password, host=globalPrimeServer, logpath='c:/Scripts/Python/syncreportstoglobalprime/logs/', loglevel=logging.DEBUG, logfilename="globalprimereportsync")
    
    
    try:
        updatestatus(queueID, STATUS_RUNNING)
        #update files to global prime        
        sftp.log_write('Global Prime Report Sync: Syncing %(ftpfolder)s on %(host)s as user %(user)s to %(localfolder)s for extractDate %(extractDate)s' % {'ftpfolder': dstBaseFolder, 'host': globalPrimeServer, 'user': username, 'localfolder': srcBaseFolder, 'extractDate': extractDate})
        tempDir = dstTempFolder % queueID 
        makedir(tempDir)
        filecount = 0
        zip = ZipUtilities()       
        zip.createZip(tempDir + '\\' + queueID + '.zip')
        try:  
            for folder in FOLDERS:
                srcPath = '%(srcBaseFolder)s/%(extractDate)s/%(extractFolder)s' % {'srcBaseFolder': srcBaseFolder, 'extractDate': extractDate, 'extractFolder': folder[0]}
                
                sftp.log_write('Source path constructed as %(srcPath)s' % {'srcPath': srcPath})
                
                dstPathDaily = '%(extractDate)s' % {'extractDate': extractDate}
                #sftp.log_write('Destination path for daily constructed as %(dstPathDaily)s' % {'dstPathDaily': dstPathDaily})
                dstPathMonthly = 'MonthEnd/%(extractDate)s' % {'extractDate': extractDate}
                
                sftp.log_write('Destination path for monthly constructed as %(dstPathMonthly)s' % {'dstPathMonthly': dstPathMonthly})
                
                if os.path.exists(srcPath) :
                    reportList = os.listdir(srcPath)
                    
                    for srcFilename in reportList:
                        portfolioID = srcFilename[0:srcFilename.index(extractDate)-1]
                        cportID = cPortIDs.get(portfolioID)                                            
                        formats = srcFilename.split('.')
                        format = formats[-1].lower()  # file extension
                        
                        if queuePortfolio == 'ALL_CUSTOM' or queuePortfolio == portfolioID:
                                                
                            if cportID is not None:
                                if format in (folder[2], folder[3], folder[4]):
                                    filecount += 1
                                    sftp.log_write('processing file %(srcFilename)s with cportid: %(cportid)s' % {'srcFilename': srcFilename, 'cportid': cportID})
                                    if srcFilename.upper().find('_MONTHLY.')  > -1 :
                                        srcFile = '%(srcPath)s/%(srcFilename)s' % {'srcPath': srcPath, 'srcFilename': srcFilename}                                                  
                                        srcFileDone = 'C:/Scripts/Python/syncreportstoglobalprime/report.done'
                                        dstFile = '%(dstPathMonthly)s/%(dstFilename)s-%(formatU)s_LE-%(cportID)s_%(extractDate)s.%(format)s' % {'dstPathMonthly': dstPathMonthly, 'dstFilename': folder[1], 'formatU': format.upper(), 'cportID': cportID, 'extractDate': extractDate, 'format': format}
                                        dstFileDone = '%(dstPathMonthly)s/%(dstFilename)s-%(formatU)s_LE-%(cportID)s_%(extractDate)s.done' % {'dstPathMonthly': dstPathMonthly, 'dstFilename': folder[1], 'formatU': format.upper(), 'cportID': cportID, 'extractDate': extractDate}   
                                        zip.toZip(srcFile, dstFile)
                                        zip.toZip(srcFileDone, dstFileDone)
                                        #os.system ("copy %s %s" % (string.replace(srcFileDone, "/" , "\\"), string.replace(dstFileDone, "/" , "\\")))                                        
                                    else:
                                        srcFile = '%(srcPath)s/%(srcFilename)s' % {'srcPath': srcPath, 'srcFilename': srcFilename} 
                                        srcFileDone = 'C:/Scripts/Python/syncreportstoglobalprime/report.done'
                                        dstFile = '%(dstPathDaily)s/%(dstFilename)s-%(formatU)s_LE-%(cportID)s_%(extractDate)s.%(format)s' % {'dstPathDaily': dstPathDaily, 'dstFilename': folder[1], 'formatU': format.upper(), 'cportID': cportID, 'extractDate': extractDate, 'format': format}
                                        dstFileDone = '%(dstPathDaily)s/%(dstFilename)s-%(formatU)s_LE-%(cportID)s_%(extractDate)s.done' % {'dstPathDaily': dstPathDaily, 'dstFilename': folder[1], 'formatU': format.upper(), 'cportID': cportID, 'extractDate': extractDate}
                                        zip.toZip(srcFile, dstFile)
                                        zip.toZip(srcFileDone, dstFileDone)
                                                                                
            zip.close()                    
            if filecount > 0 :   
                flist = []
                remoteFilename = dstBaseFolder + queueID + '.zip'
                flist.append((zip.getFilename(), remoteFilename))                
                
                sftp.log_write('Uploading files to %s' % globalPrimeServer)
                errorlist = sftp.uploadfiles(flist)
            
                if len(errorlist) > 0:
                    sftp.log_error('Not all files could be uploaded')
                    for err in errorlist:
                        sftp.log_error(err)
                    result = 1
                else:
                    zip.unZipRemotely(globalPrimeServer, password, remoteFilename, dstBaseFolder)
                    print updateprimepage()    
            else:
                sftp.log_write('no files to upload, either no files or cportids have matched')
                        
            updatestatus(queueID,STATUS_COMPLETE)
                                     
        except Exception, e:
            updatestatus(queueID,STATUS_FAILED)
            sftp.log_error('Error checking for files: %s: %s' % (e.__class__, e))
            result = 1
            zip.close()
    finally:
        sftp.close()
            
        
except Exception, e:
    updatestatus(queueID,STATUS_FAILED) 
    print 'Error checking for files: %s: %s' % (e.__class__, e)
    result = 1
    
sys.exit(result)

It would have been more helpful if you had copied the actual traceback, but it looks like the problem is in your code:

def updateprimepage():
    try:
        import urllib
        cobdate = string.replace(sys.argv[1], '-', '')
        f = urllib.urlopen('http://xxx.xx.xx.xxx:18080/optionupload/OptionReportOndemandServlet?dirpath=%%2Fexport%%2Fdata%%2Fgpfs%%2Freports%%2FSAReport&cobdate=%s&submit=submit&operation=submit' %cobdate)
        print f.read()
 
    except IOError:
        print 'Cannot open URL %s for reading' % f

If an IOError is raised, 'f' will never be created, yet your exception handler references it.

For example:

try:
    f = 1/0
except ZeroDivisionError, e:
    # f doesn't exist here,
    # so referencing it would
    # raise UnboundLocalError
    print 'zero division error'

"f" is a local variable and does not exist outside of the function unless you return it (which you don't). Also, line 218, print updateprimepage(), will print None because nothing is returned from the function.
