Thread: [Pypt-offline-general] SF.net SVN: pypt-offline: [124] trunk/pypt_core.py
Status: Beta
                
                Brought to you by:
                
                    riteshsarraf
                    
                
            | 
      
      
      From: <rit...@us...> - 2007-02-04 15:06:48
       | 
| Revision: 124
          http://svn.sourceforge.net/pypt-offline/?rev=124&view=rev
Author:   riteshsarraf
Date:     2007-02-04 07:06:48 -0800 (Sun, 04 Feb 2007)
Log Message:
-----------
* Some path checks
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-01-28 20:16:55 UTC (rev 123)
+++ trunk/pypt_core.py	2007-02-04 15:06:48 UTC (rev 124)
@@ -473,7 +473,7 @@
             raw_data_list = open(url_file, 'r').readlines()
         except IOError, (errno, strerror):
             log.err("%s %s\n" %(errno, strerror))
-            errfunc(errno, '')
+            errfunc(errno, '', url_file)
             
         #INFO: Mac OS X in mis-behaving with Python Threading
         # Use the conventional model for Mac OS X
@@ -764,23 +764,29 @@
                 sys.exit(0)
                
         if pypt_variables.options.fetch_update:
-            log.msg("\nFetching uris which update apt's package database\n\n")
-           
-            pypt_variables.options.disable_md5check = True
-            # Since we're in fetch_update, the download_type will be non-deb/rpm data
-            # 1 is for update packages 
-            # 2 is for upgrade packages
-            fetcher(pypt_variables.options.fetch_update, pypt_variables.options.download_dir, pypt_variables.options.cache_dir, pypt_variables.options.zip_it, pypt_variables.options.zip_update_file, 1)
+            if os.access(pypt_variables.options.fetch_update, os.F_OK):
+                log.msg("\nFetching uris which update apt's package database\n\n")
+            	pypt_variables.options.disable_md5check = True
+            	# Since we're in fetch_update, the download_type will be non-deb/rpm data
+            	# 1 is for update packages 
+            	# 2 is for upgrade packages
+            	fetcher(pypt_variables.options.fetch_update, pypt_variables.options.download_dir, pypt_variables.options.cache_dir, pypt_variables.options.zip_it, pypt_variables.options.zip_update_file, 1)
+            else:
+                log.err("\n%s file not present. Check path.\n" % (pypt_variables.options.fetch_update) )
+                sys.exit(1)
                  
         if pypt_variables.options.fetch_upgrade:
-            log.msg("\nFetching packages which need upgradation\n\n")
+            if os.access(pypt_variables.options.fetch_upgrade, os.F_OK):
+                log.msg("\nFetching packages which need upgradation\n\n")
+            	# Since we're in fetch_update, the download_type will be non-deb/rpm data
+            	# 1 is for update packages 
+            	# 2 is for upgrade packages
+            	fetcher(pypt_variables.options.fetch_upgrade, pypt_variables.options.download_dir, pypt_variables.options.cache_dir, pypt_variables.options.zip_it, pypt_variables.options.zip_upgrade_file, 2)
+            	sys.exit(0)
+            else:
+                log.err("\n%s file not present. Check path.\n" % (pypt_variables.options.fetch_upgrade) )
+                sys.exit(1)
                  
-            # Since we're in fetch_update, the download_type will be non-deb/rpm data
-            # 1 is for update packages 
-            # 2 is for upgrade packages
-            fetcher(pypt_variables.options.fetch_upgrade, pypt_variables.options.download_dir, pypt_variables.options.cache_dir, pypt_variables.options.zip_it, pypt_variables.options.zip_upgrade_file, 2)
-            sys.exit(0)
-                 
         if pypt_variables.options.install_update:
             #INFO: Comment these lines to do testing on Windows machines too
             if os.geteuid() != 0:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-13 19:36:21
       | 
| Revision: 129
          http://svn.sourceforge.net/pypt-offline/?rev=129&view=rev
Author:   riteshsarraf
Date:     2007-03-13 12:36:22 -0700 (Tue, 13 Mar 2007)
Log Message:
-----------
* Minor naming fixes
* logging fixes
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-13 09:28:26 UTC (rev 128)
+++ trunk/pypt_core.py	2007-03-13 19:36:22 UTC (rev 129)
@@ -528,7 +528,7 @@
                     else:
                         log.msg("\r%s %s done.\n" % (file, "    ") )
                         if zip_bool:
-                            if archive.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
+                            if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                 log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
                                 os.unlink(os.path.join(download_path, file) ) # Remove it because we don't need the file once it is zipped.
                                 sys.exit(1)
@@ -538,21 +538,20 @@
                     if cache_dir is None:
                         log.msg("Downloading %s - %d KB\n" % (file, size/1024))
                         if download_from_web(url, file, download_path, FetcherInstance) != True:
-                            pypt_variables.errlist.append(file)
+                            errlist.append(file)
                             if zip_bool:
                                 log.msg("\r%s %s done.\n" % (file, "    "))
-                                archive.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+                                FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                     else:
-                        if copy_first_match(cache_dir, file, download_path, checksum) == False:
+                        if find_first_match(cache_dir, file, download_path, checksum) == False:
                             log.msg("Downloading %s - %d KB\n" % (file, size/1024))
                             if download_from_web(url, file, download_path, FetcherInstance) != True:
                                  errlist.append(file)
                             else:
                                 log.msg("\r%s %s done.\n" % (file, "    "))
                                 if os.access(os.path.join(cache_dir, file), os.F_OK):
-                                    log.debug("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir)) #INFO: The file is already there.
-                                    log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir))
+                                    log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir) ) #INFO: The file is already there.
                                 else:
                                     if os.access(cache_dir, os.W_OK):
                                         shutil.copy(file, cache_dir)
@@ -561,11 +560,11 @@
                                         log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
                                         
                                 if zip_bool:
-                                    archive.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+                                    FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                     os.unlink(os.path.join(download_path, file))
                         elif True:
                             if zip_bool:
-                                archive.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+                                FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                                 
                 else:
@@ -575,7 +574,6 @@
         #INFO: Thread Support
         if ArgumentOptions.num_of_threads > 1:
             log.msg("WARNING: Threads is still in beta stage. It's better to use just a single thread at the moment.\n\n")
-            log.warn("Threads is still in beta stage. It's better to use just a single thread at the moment.\n\n")
             
         def run(request, response, func=find_first_match):
             '''Get items from the request Queue, process them
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-16 10:57:22
       | 
| Revision: 130
          http://svn.sourceforge.net/pypt-offline/?rev=130&view=rev
Author:   riteshsarraf
Date:     2007-03-16 03:51:03 -0700 (Fri, 16 Mar 2007)
Log Message:
-----------
* Damn! Never was the lock really being acquired. Fixed now.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-13 19:36:22 UTC (rev 129)
+++ trunk/pypt_core.py	2007-03-16 10:51:03 UTC (rev 130)
@@ -174,7 +174,7 @@
         
         try:
             if self.lock:
-                self.ZipLock.acquire()
+                self.ZipLock.acquire(True)
             
             filename = zipfile.ZipFile(zip_file_name, "a")
         except IOError:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-22 08:49:42
       | 
| Revision: 131
          http://svn.sourceforge.net/pypt-offline/?rev=131&view=rev
Author:   riteshsarraf
Date:     2007-03-22 01:49:43 -0700 (Thu, 22 Mar 2007)
Log Message:
-----------
* Make download_from_web a class
* Make ProgressBar class a base class for DownloadFromWeb class because a progress bar
is a required feature for any downloader.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-16 10:51:03 UTC (rev 130)
+++ trunk/pypt_core.py	2007-03-22 08:49:43 UTC (rev 131)
@@ -275,63 +275,68 @@
                 return os.path.join(path, file)
             return False
             
-def download_from_web(url, file, download_dir, ProgressBarInstance):
-    '''
-    Download the required file from the web
-    The arguments are passed everytime to the function so that,
-    may be in future, we could reuse this function
-    '''
-       
-    try:
-        block_size = 4096
-        i = 0
-        counter = 0
-        
-        os.chdir(download_dir)
-        temp = urllib2.urlopen(url)
-        headers = temp.info()
-        size = int(headers['Content-Length'])
-        data = open(file,'wb')
-        
-        ProgressBarInstance.addItem(size)
+class DownloadFromWeb(ProgressBar):
+    
+    def __init__(self, width):
+        ProgressBar.__init__(self, width=width)
+    
+    def download_from_web(self, url, file, download_dir):
+        '''
+        Download the required file from the web
+        The arguments are passed everytime to the function so that,
+        may be in future, we could reuse this function
+        '''
+           
+        try:
+            block_size = 4096
+            i = 0
+            counter = 0
             
- 
-        while i < size:
-            data.write (temp.read(block_size))
-            increment = min(block_size, size - i)
-            i += block_size
-            counter += 1
-            ProgressBarInstance.updateValue(increment)
-        ProgressBarInstance.completed()
-        data.close()
-        temp.close()
+            os.chdir(download_dir)
+            temp = urllib2.urlopen(url)
+            headers = temp.info()
+            size = int(headers['Content-Length'])
+            data = open(file,'wb')
+            
+            #INFO: Add the download thread into the Global ProgressBar Thread
+            self.addItem(size)
+     
+            while i < size:
+                data.write (temp.read(block_size))
+                increment = min(block_size, size - i)
+                i += block_size
+                counter += 1
+                self.updateValue(increment)
+            self.completed()
+            data.close()
+            temp.close()
+            
+            return True
+            
+        #FIXME: Find out optimal fix for this exception handling
+        except OSError, (errno, strerror):
+            #log.err("%s\n" %(download_dir))
+            errfunc(errno, strerror, download_dir)
+            
+        except urllib2.HTTPError, errstring:
+            #log.err("%s\n" % (file))
+            errfunc(errstring.code, errstring.msg, file)
+            
+        except urllib2.URLError, errstring:
+            #We pass error code "1" here becuase URLError
+            # doesn't pass any error code.
+            # URLErrors shouldn't be ignored, hence program termination
+            if errstring.reason.args[0] == 10060:
+                errfunc(errstring.reason.args[0], errstring.reason, url)
+            #errfunc(1, errstring.reason)
+            #pass
         
-        return True
+        except IOError, e:
+            if hasattr(e, 'reason'):
+                log.err("%s\n" % (e.reason))
+            if hasattr(e, 'code') and hasattr(e, 'reason'):
+                errfunc(e.code, e.reason, file)
         
-    #FIXME: Find out optimal fix for this exception handling
-    except OSError, (errno, strerror):
-        #log.err("%s\n" %(download_dir))
-        errfunc(errno, strerror, download_dir)
-        
-    except urllib2.HTTPError, errstring:
-        #log.err("%s\n" % (file))
-        errfunc(errstring.code, errstring.msg, file)
-        
-    except urllib2.URLError, errstring:
-        #We pass error code "1" here becuase URLError
-        # doesn't pass any error code.
-        # URLErrors shouldn't be ignored, hence program termination
-        if errstring.reason.args[0] == 10060:
-            errfunc(errstring.reason.args[0], errstring.reason, url)
-        #errfunc(1, errstring.reason)
-        #pass
-    
-    except IOError, e:
-        if hasattr(e, 'reason'):
-            log.err("%s\n" % (e.reason))
-        if hasattr(e, 'code') and hasattr(e, 'reason'):
-            errfunc(e.code, e.reason, file)
-        
 def files(root): 
     for path, folders, files in os.walk(root): 
         for file in files: 
@@ -448,9 +453,9 @@
     cache_dir = ArgumentOptions.cache_dir
     zip_bool = ArgumentOptions.zip_it
     
-    class FetcherClass(ProgressBar, Archiver, MD5Check):
+    class FetcherClass(DownloadFromWeb, Archiver, MD5Check):
         def __init__(self, width, lock):
-            ProgressBar.__init__(self, width=width)
+            DownloadFromWeb.__init__(self, width=width)
             #ProgressBar.__init__(self, width)
             #self.width = width
             Archiver.__init__(self, lock=lock)
@@ -523,7 +528,7 @@
                 log.msg("Downloading %s\n" % (file) ) 
                 
                 if key == 'Update':
-                    if download_from_web(url, file, download_path, FetcherInstance) != True:
+                    if FetcherInstance.download_from_web(url, file, download_path) != True:
                         errlist.append(file)
                     else:
                         log.msg("\r%s %s done.\n" % (file, "    ") )
@@ -537,7 +542,7 @@
                 elif key == 'Upgrade':
                     if cache_dir is None:
                         log.msg("Downloading %s - %d KB\n" % (file, size/1024))
-                        if download_from_web(url, file, download_path, FetcherInstance) != True:
+                        if FetcherInstance.download_from_web(url, file, download_path) != True:
                             errlist.append(file)
                             if zip_bool:
                                 log.msg("\r%s %s done.\n" % (file, "    "))
@@ -546,7 +551,7 @@
                     else:
                         if find_first_match(cache_dir, file, download_path, checksum) == False:
                             log.msg("Downloading %s - %d KB\n" % (file, size/1024))
-                            if download_from_web(url, file, download_path, FetcherInstance) != True:
+                            if FetcherInstance.download_from_web(url, file, download_path) != True:
                                  errlist.append(file)
                             else:
                                 log.msg("\r%s %s done.\n" % (file, "    "))
@@ -605,7 +610,7 @@
                     if exit_status == False:
                         log.msg("Downloading %s\n" % (file) ) 
                         
-                        if download_from_web(url, file, download_path, FetcherInstance) == True:
+                        if FetcherInstance.download_from_web(url, file, download_path) == True:
                             log.msg("%s done.\n" % (file) )
                             if zip_bool:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
@@ -634,7 +639,7 @@
                             else:
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
                                 log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
-                                if download_from_web(url, file, download_path, FetcherInstance) == True:
+                                if FetcherInstance.download_from_web(url, file, download_path) == True:
                                     log.msg("%s done.\n" % (file) )
                                     if ArgumentOptions.cache_dir:
                                         try:
@@ -658,7 +663,7 @@
                     else:
                         log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
                         log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
-                        if download_from_web(url, file, download_path, FetcherInstance) == True:
+                        if FetcherInstance.download_from_web(url, file, download_path) == True:
                             if ArgumentOptions.disable_md5check is False:
                                 if FetcherInstance.md5_check(full_file_path, checksum) is True:
                                             
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-22 09:57:08
       | 
| Revision: 132
          http://svn.sourceforge.net/pypt-offline/?rev=132&view=rev
Author:   riteshsarraf
Date:     2007-03-22 02:57:06 -0700 (Thu, 22 Mar 2007)
Log Message:
-----------
* Removing the --debug option
* Adding color support on Windows using WConio
* Removed duplicate function definition of files() (used by find_first_match() for os.walk)
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-22 08:49:43 UTC (rev 131)
+++ trunk/pypt_core.py	2007-03-22 09:57:06 UTC (rev 132)
@@ -108,6 +108,21 @@
     You should pass these options, taking it from optparse/getopt,
     during instantiation'''
     
+    ''' WConio can provide simple coloring mechanism for Microsoft Windows console
+    Color Codes:
+    Black = 0
+    Green = 2
+    Red = 4
+    White = 15
+    Light Red = 12
+    Light Cyan = 11
+    '''
+    
+    try:
+        import WConio
+    except ImportError:
+        WindowColor = False
+    
     def __init__(self, warnings, verbose, debug):
         
         if warnings is True:
@@ -123,32 +138,32 @@
         else: self.DEBUG = False
         
     def msg(self, msg):
+        if self.WindowColor:
+            WConio.textcolor(15)
         sys.stdout.write(msg)
         sys.stdout.flush()
         
     def err(self, msg):
+        if self.WindowColor:
+            WConio.textcolor(4)
         sys.stderr.write(msg)
         sys.stderr.flush()
     
     # For the rest, we need to check the options also
     def warn(self, msg):
         if self.WARN is True:
-        #if options.warnings is True:
+            if self.WindowColor:
+                WConio.textcolor(12)
             sys.stderr.write(msg)
             sys.stderr.flush()
 
     def verbose(self, msg):
         if self.VERBOSE is True:
-        #if options.verbose is True:
+            if self.WindowColor:
+                WConio.textcolor(11)
             sys.stdout.write(msg)
             sys.stdout.flush()
             
-    def debug(self, msg):
-        if self.DEBUG is True:
-        #if options.debug is True:
-            sys.stdout.write(msg)
-            sys.stdout.flush()
-            
 class Archiver:
     def __init__(self, lock=None):
         if lock is None or lock != 1:
@@ -257,7 +272,7 @@
             return False
 
 
-def files(self, root): 
+def files(root): 
     for path, folders, files in os.walk(root): 
         for file in files: 
             yield path, file 
@@ -336,11 +351,6 @@
                 log.err("%s\n" % (e.reason))
             if hasattr(e, 'code') and hasattr(e, 'reason'):
                 errfunc(e.code, e.reason, file)
-        
-def files(root): 
-    for path, folders, files in os.walk(root): 
-        for file in files: 
-            yield path, file 
 
 def copy_first_match(cache_dir, filename, dest_dir, checksum): # aka new_walk_tree_copy() 
     '''Walks into "reposiotry" looking for "filename".
@@ -822,7 +832,6 @@
                       action="store", type="string", metavar=".")
     parser.add_option("--verbose", dest="verbose", help="Enable verbose messages", action="store_true")
     parser.add_option("--warnings", dest="warnings", help="Enable warnings", action="store_true")
-    parser.add_option("--debug", dest="debug", help="Enable Debug mode", action="store_true")
     parser.add_option("-u","--uris", dest="uris_file",
                       help="Full path of the uris file which contains the main database of files to be downloaded",action="store", type="string")
     parser.add_option("","--disable-md5check", dest="disable_md5check",
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-22 10:06:02
       | 
| Revision: 133
          http://svn.sourceforge.net/pypt-offline/?rev=133&view=rev
Author:   riteshsarraf
Date:     2007-03-22 03:06:03 -0700 (Thu, 22 Mar 2007)
Log Message:
-----------
* Oh!! How did I miss cleaning this up.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-22 09:57:06 UTC (rev 132)
+++ trunk/pypt_core.py	2007-03-22 10:06:03 UTC (rev 133)
@@ -123,7 +123,7 @@
     except ImportError:
         WindowColor = False
     
-    def __init__(self, warnings, verbose, debug):
+    def __init__(self, warnings, verbose):
         
         if warnings is True:
             self.WARN = True
@@ -133,10 +133,6 @@
             self.VERBOSE = True
         else: self.VERBOSE = False
         
-        if debug is True:
-            self.DEBUG = True
-        else: self.DEBUG = False
-        
     def msg(self, msg):
         if self.WindowColor:
             WConio.textcolor(15)
@@ -882,7 +878,7 @@
         # The log implementation
         # Instantiate the class
         global log
-        log = Log(options.warnings, options.verbose, options.debug)
+        log = Log(options.warnings, options.verbose)
         
         log.msg("pypt-offline %s\n" % (version))
         log.msg("Copyright %s\n" % (copyright))
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-03-22 11:27:30
       | 
| Revision: 135
          http://svn.sourceforge.net/pypt-offline/?rev=135&view=rev
Author:   riteshsarraf
Date:     2007-03-22 04:27:31 -0700 (Thu, 22 Mar 2007)
Log Message:
-----------
* More color beautification
* Added a new Log method success, which will print success messages
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-03-22 11:02:57 UTC (rev 134)
+++ trunk/pypt_core.py	2007-03-22 11:27:31 UTC (rev 135)
@@ -109,7 +109,7 @@
     '''A OOP implementation for logging.
     warnings is to tackle the warning option
     verbose is to tackle the verbose option
-    debug is to tackle the debug option
+    color is if you want to colorize your output
     
     You should pass these options, taking it from optparse/getopt,
     during instantiation'''
@@ -137,19 +137,29 @@
         self.color = color
         
     def msg(self, msg):
+        'Print general messages'
         if self.color:
             WConio.textcolor(15)
         sys.stdout.write(msg)
         sys.stdout.flush()
         
     def err(self, msg):
+        'Print messages with an error'
         if self.color:
             WConio.textcolor(4)
         sys.stderr.write(msg)
         sys.stderr.flush()
+        
+    def success(self, msg):
+        'Print messages with a success'
+        if self.color:
+            WConio.textcolor(2)
+        sys.stdout.write(msg)
+        sys.stdout.flush()
     
     # For the rest, we need to check the options also
     def warn(self, msg):
+        'Print warnings'
         if self.WARN is True:
             if self.color:
                 WConio.textcolor(12)
@@ -157,6 +167,7 @@
             sys.stderr.flush()
 
     def verbose(self, msg):
+        'Print verbose messages'
         if self.VERBOSE is True:
             if self.color:
                 WConio.textcolor(11)
@@ -540,7 +551,7 @@
                     if FetcherInstance.download_from_web(url, file, download_path) != True:
                         errlist.append(file)
                     else:
-                        log.msg("\r%s %s done.\n" % (file, "    ") )
+                        log.success("\r%s %s done.\n" % (file, "    ") )
                         if zip_bool:
                             if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                 log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
@@ -554,7 +565,7 @@
                         if FetcherInstance.download_from_web(url, file, download_path) != True:
                             errlist.append(file)
                             if zip_bool:
-                                log.msg("\r%s %s done.\n" % (file, "    "))
+                                log.success("\r%s %s done.\n" % (file, "    "))
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                     else:
@@ -563,7 +574,7 @@
                             if FetcherInstance.download_from_web(url, file, download_path) != True:
                                  errlist.append(file)
                             else:
-                                log.msg("\r%s %s done.\n" % (file, "    "))
+                                log.success("\r%s %s done.\n" % (file, "    "))
                                 if os.access(os.path.join(cache_dir, file), os.F_OK):
                                     log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir) ) #INFO: The file is already there.
                                 else:
@@ -620,7 +631,7 @@
                         log.msg("Downloading %s\n" % (file) ) 
                         
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
-                            log.msg("%s done.\n" % (file) )
+                            log.success("%s done.\n" % (file) )
                             if zip_bool:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                     log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
@@ -639,11 +650,11 @@
                             if FetcherInstance.md5_check(full_file_path, checksum) is True:
                                 if zip_bool:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
-                                        log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                        log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
                                 else:
                                     try:
                                         shutil.copy(full_file_path, download_path)
-                                        log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                        log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
                                     except shutil.Error:
                                         log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
                                         
@@ -651,7 +662,7 @@
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
                                 log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
-                                    log.msg("%s done.\n" % (file) )
+                                    log.success("%s done.\n" % (file) )
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
@@ -668,7 +679,7 @@
                             #INFO: If md5check is disabled, just copy it.
                             try:
                                 shutil.copy(full_file_path, download_path)
-                                log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
                             except shutil.Error:
                                 log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
                     else:
@@ -697,7 +708,7 @@
                                     sys.exit(1)
                                 log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                 os.unlink(os.path.join(download_path, file) )
-                            log.msg("%s done.\n" % (file) )
+                            log.success("%s done.\n" % (file) )
                         else:
                             log.err("Couldn't find %s\n" % (file) )
                             errlist.append(file)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-09 13:22:39
       | 
| Revision: 141
          http://svn.sourceforge.net/pypt-offline/?rev=141&view=rev
Author:   riteshsarraf
Date:     2007-05-09 06:22:41 -0700 (Wed, 09 May 2007)
Log Message:
-----------
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-08 13:27:28 UTC (rev 140)
+++ trunk/pypt_core.py	2007-05-09 13:22:41 UTC (rev 141)
@@ -778,7 +778,6 @@
     # Print the failed files
     if len(errlist) == 0:
         log.msg("\nAll files have been downloaded.\n")
-        pass # Don't print if nothing failed.
     else:
         log.err("\n\nThe following files failed to be downloaded.\n")
         for error in errlist:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-10 14:48:34
       | 
| Revision: 142
          http://svn.sourceforge.net/pypt-offline/?rev=142&view=rev
Author:   riteshsarraf
Date:     2007-05-10 07:48:35 -0700 (Thu, 10 May 2007)
Log Message:
-----------
* Minor changes
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-09 13:22:41 UTC (rev 141)
+++ trunk/pypt_core.py	2007-05-10 14:48:35 UTC (rev 142)
@@ -466,10 +466,6 @@
         log.err("%s\n" % (errormsg))
         sys.exit(errno)
     elif errno == 504 or errno == 404 or errno == 10060:
-        #TODO: Counter which will inform that some packages weren't fetched.
-        # A counter needs to be implemented which will at the end inform the list of sources which 
-        # failed to be downloaded with the above codes.
-        
         # 504 is for gateway timeout
         # On gateway timeouts we can keep trying out because
         # one apt source.list might have different hosts.
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-10 20:19:06
       | 
| Revision: 143
          http://svn.sourceforge.net/pypt-offline/?rev=143&view=rev
Author:   riteshsarraf
Date:     2007-05-10 13:19:07 -0700 (Thu, 10 May 2007)
Log Message:
-----------
* Beautification to use the Package Name instead of the full length file name.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-10 14:48:35 UTC (rev 142)
+++ trunk/pypt_core.py	2007-05-10 20:19:07 UTC (rev 143)
@@ -568,6 +568,7 @@
             for item in FetchData.get(key):
                 
                 (url, file, download_size, checksum) = stripper(each_single_item)
+                PackageName = file.split("_")[0]
                 log.msg("Downloading %s\n" % (file) ) 
                 
                 if key == 'Update':
@@ -586,18 +587,18 @@
                     if cache_dir is None:
                         log.msg("Downloading %s - %d KB\n" % (file, size/1024))
                         if FetcherInstance.download_from_web(url, file, download_path) != True:
-                            errlist.append(file)
+                            errlist.append(PackageName)
                             if zip_bool:
-                                log.success("\r%s %s done.\n" % (file, "    "))
+                                log.success("\r%s %s done.\n" % (PackageName, "    "))
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                     else:
                         if find_first_match(cache_dir, file, download_path, checksum) == False:
-                            log.msg("Downloading %s - %d KB\n" % (file, size/1024))
+                            log.msg("Downloading %s - %d KB\n" % (PackageName, size/1024))
                             if FetcherInstance.download_from_web(url, file, download_path) != True:
-                                 errlist.append(file)
+                                 errlist.append(PackageName)
                             else:
-                                log.success("\r%s %s done.\n" % (file, "    "))
+                                log.success("\r%s %s done.\n" % (PackageName, "    "))
                                 if os.access(os.path.join(cache_dir, file), os.F_OK):
                                     log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir) ) #INFO: The file is already there.
                                 else:
@@ -636,6 +637,7 @@
                     break
                 (key, item) = tuple_item_key
                 (url, file, download_size, checksum) = stripper(item)
+                PackageName = file.split("_")[0]
                 thread_name = threading.currentThread().getName()
                 
                 if key == 'Update':
@@ -673,19 +675,19 @@
                             if FetcherInstance.md5_check(full_file_path, checksum) is True:
                                 if zip_bool:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
-                                        log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                        log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
                                 else:
                                     try:
                                         shutil.copy(full_file_path, download_path)
-                                        log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                        log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
                                     except shutil.Error:
                                         log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
                                         
                             else:
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
-                                log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
+                                log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
-                                    log.success("%s done.\n" % (file) )
+                                    log.success("%s done.\n" % (PackageName) )
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
@@ -707,7 +709,7 @@
                                 log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
                     else:
                         log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
-                        log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
+                        log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
                             if ArgumentOptions.disable_md5check is False:
                                 if FetcherInstance.md5_check(full_file_path, checksum) is True:
@@ -731,10 +733,10 @@
                                     sys.exit(1)
                                 log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                 os.unlink(os.path.join(download_path, file) )
-                            log.success("%s done.\n" % (file) )
+                            log.success("\n%s done.\n" % (PackageName) )
                         else:
-                            log.err("Couldn't find %s\n" % (file) )
-                            errlist.append(file)
+                            #log.err("Couldn't find %s\n" % (PackageName) )
+                            errlist.append(PackageName)
                     
         # Create two Queues for the requests and responses
         requestQueue = Queue.Queue()
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-12 23:05:00
       | 
| Revision: 144
          http://svn.sourceforge.net/pypt-offline/?rev=144&view=rev
Author:   riteshsarraf
Date:     2007-05-12 16:04:14 -0700 (Sat, 12 May 2007)
Log Message:
-----------
* Simplified error code handling. Major error codes are now in a list
* Some little display simplification. Print a user friendly package name
* Fixed the ugly problem of progressbar display. Now the progressbar status is overwritten with spaces (" "*40 chars) to make it look blank
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-10 20:19:07 UTC (rev 143)
+++ trunk/pypt_core.py	2007-05-12 23:04:14 UTC (rev 144)
@@ -454,30 +454,25 @@
     This function does the job of behaving accordingly
     as per the error codes.
     '''
+    error_codes = [-3, 13, 504, 404, 10060, 104]
+    # 104, 'Connection reset by peer'
+    # 504 is for gateway timeout
+    # 404 is for URL error. Page not found.
+    # 10060 is for Operation Time out. There can be multiple reasons for this timeout
     
-    if errno == -3 or errno == 13:
-        #TODO: Find out what these error codes are for
-        # and better document them the next time you find it out.
-        # 13 is for "Permission Denied" when you don't have privileges to access the destination 
+    #TODO: Find out what these error codes are for
+    # and better document them the next time you find it out.
+    # 13 is for "Permission Denied" when you don't have privileges to access the destination 
+    if errno in error_codes:
+        log.err("%s - %s - %s\n" % (filename, errno, errormsg))
+        log.verbose(" Will still try with other package uris\n\n")
         pass
     elif errno == 407 or errno == 2:
         # These, I believe are from OSError/IOError exception.
         # I'll document it as soon as I confirm it.
         log.err("%s\n" % (errormsg))
         sys.exit(errno)
-    elif errno == 504 or errno == 404 or errno == 10060:
-        # 504 is for gateway timeout
-        # On gateway timeouts we can keep trying out because
-        # one apt source.list might have different hosts.
-        # 404 is for URL error. Page not found.
-        # There can be instances where one source is changed but the rest are working.
-        # 10060 is for Operation Time out. There can be multiple reasons for this timeout
-        # Primarily if the host is down or a slow network or abruption, hence not the whole execution should be aborted
-        log.err("%s - %s - %s\n" % (filename, errno, errormsg))
-        log.verbose(" Will still try with other package uris\n\n")
-        pass
     elif errno == 1:
-        # We'll pass error code 1 where ever we want to gracefully exit
         log.err(errormsg)
         log.err("Explicit program termination %s\n" % (errno))
         sys.exit(errno)
@@ -494,7 +489,6 @@
     '''
     
     cache_dir = ArgumentOptions.cache_dir
-    zip_bool = ArgumentOptions.zip_it
     
     class FetcherClass(DownloadFromWeb, Archiver, MD5Check):
         def __init__(self, width, lock):
@@ -568,28 +562,33 @@
             for item in FetchData.get(key):
                 
                 (url, file, download_size, checksum) = stripper(each_single_item)
-                PackageName = file.split("_")[0]
-                log.msg("Downloading %s\n" % (file) ) 
                 
                 if key == 'Update':
+                    temp_file = file.split("_")
+                    PackageName = temp_file[0]
+                    PackageName += " - " + temp_file[len(temp_file) - 1]
+                    del temp_file
+                    
+                    log.msg("Downloading %s\n" % (PackageName) ) 
                     if FetcherInstance.download_from_web(url, file, download_path) != True:
                         errlist.append(file)
                     else:
-                        log.success("\r%s %s done.\n" % (file, "    ") )
-                        if zip_bool:
+                        log.success("\n%s done.\n" % (PackageName) )
+                        if ArgumentOptions.zip_it:
                             if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                 log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
                                 os.unlink(os.path.join(download_path, file) ) # Remove it because we don't need the file once it is zipped.
-                                sys.exit(1)
+                                #sys.exit(1)
                         pass
                                         
                 elif key == 'Upgrade':
+                    PackageName = file.split("_")[0]
                     if cache_dir is None:
                         log.msg("Downloading %s - %d KB\n" % (file, size/1024))
                         if FetcherInstance.download_from_web(url, file, download_path) != True:
                             errlist.append(PackageName)
-                            if zip_bool:
-                                log.success("\r%s %s done.\n" % (PackageName, "    "))
+                            if ArgumentOptions.zip_it:
+                                log.success("\n%s done.\n" % (PackageName) )
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                     else:
@@ -598,7 +597,7 @@
                             if FetcherInstance.download_from_web(url, file, download_path) != True:
                                  errlist.append(PackageName)
                             else:
-                                log.success("\r%s %s done.\n" % (PackageName, "    "))
+                                log.success("\n%s done.\n" % (PackageName) )
                                 if os.access(os.path.join(cache_dir, file), os.F_OK):
                                     log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir) ) #INFO: The file is already there.
                                 else:
@@ -608,11 +607,11 @@
                                     else:
                                         log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
                                         
-                                if zip_bool:
+                                if ArgumentOptions.zip_it:
                                     FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                     os.unlink(os.path.join(download_path, file))
                         elif True:
-                            if zip_bool:
+                            if ArgumentOptions.zip_it:
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
                                 
@@ -637,10 +636,13 @@
                     break
                 (key, item) = tuple_item_key
                 (url, file, download_size, checksum) = stripper(item)
-                PackageName = file.split("_")[0]
                 thread_name = threading.currentThread().getName()
                 
                 if key == 'Update':
+                    temp_file = file.split("_")
+                    PackageName = temp_file[0]
+                    PackageName += " - " + temp_file[len(temp_file) - 1]
+                    del temp_file
                     
                     #INFO: We pass None as a filename here because we don't want to do a tree search of
                     # update files. Update files are changed daily and there is no point in doing a search of
@@ -653,11 +655,11 @@
                     exit_status = response.get()
                     
                     if exit_status == False:
-                        log.msg("Downloading %s\n" % (file) ) 
+                        log.msg("Downloading %s\n" % (PackageName) ) 
                         
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
-                            log.success("%s done.\n" % (file) )
-                            if zip_bool:
+                            log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
+                            if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                     log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
                                     sys.exit(1)
@@ -666,6 +668,7 @@
                             errlist.append(file)
                                 
                 elif key == 'Upgrade':
+                    PackageName = file.split("_")[0]
                     response.put(func(cache_dir, file) ) 
                     #INFO: find_first_match() returns False of a file name with absolute path
                     full_file_path = response.get()
@@ -673,7 +676,7 @@
                     if full_file_path != False:
                         if ArgumentOptions.disable_md5check is False:
                             if FetcherInstance.md5_check(full_file_path, checksum) is True:
-                                if zip_bool:
+                                if ArgumentOptions.zip_it:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
                                 else:
@@ -687,7 +690,7 @@
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
                                 log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
-                                    log.success("%s done.\n" % (PackageName) )
+                                    log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
@@ -721,19 +724,19 @@
                                         except shutil.Error:
                                             log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, ArgumentOptions.cache_dir) )
                                             
-                                    if zip_bool:
+                                    if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                             log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                             sys.exit(1)
                                         log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                         os.unlink(os.path.join(download_path, file) )
-                            if zip_bool:
+                            if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                     log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                     sys.exit(1)
                                 log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                 os.unlink(os.path.join(download_path, file) )
-                            log.success("\n%s done.\n" % (PackageName) )
+                            log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
                         else:
                             #log.err("Couldn't find %s\n" % (PackageName) )
                             errlist.append(PackageName)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-19 18:03:17
       | 
| Revision: 145
          http://svn.sourceforge.net/pypt-offline/?rev=145&view=rev
Author:   riteshsarraf
Date:     2007-05-19 11:03:15 -0700 (Sat, 19 May 2007)
Log Message:
-----------
* Code formatting and uncluttering
* Finally finalized the bug fetching code. Now syncer() needs to have parsing done.
* Changed the num of spaces to be printed to 60
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-12 23:04:14 UTC (rev 144)
+++ trunk/pypt_core.py	2007-05-19 18:03:15 UTC (rev 145)
@@ -55,6 +55,7 @@
         return False
         
 class ProgressBar(object):
+    
     def __init__(self, minValue = 0, maxValue = 0, width = None, fd = sys.stderr):
         #width does NOT include the two places for [] markers
         self.min = minValue
@@ -62,15 +63,19 @@
         self.span = float(self.max - self.min)
         self.fd = fd
         self.signal_set = False
+        
         if width is None:
+            
             try:
                 self.handle_resize(None, None)
                 signal.signal(signal.SIGWINCH, self.handle_resize)
                 self.signal_set = True
             except:
                 self.width = 79 #The standard
+                
         else:
             self.width = width
+            
         self.value = self.min
         self.items = 0 #count of items being tracked
         self.complete = 0
@@ -86,8 +91,10 @@
         
     def completed(self):
         self.complete = self.complete + 1
+        
         if self.signal_set:
             signal.signal(signal.SIGWINCH, signal.SIG_DFL)
+            
         self.display()
         
     def addItem(self, maxValue):
@@ -106,6 +113,7 @@
         return ("[" + "#"*widthFilled + " "*(self.width - widthFilled) + "]" + " %5.1f%% of %d KB" % (percentFilled * 100.0, self.max/1024))
     
 class Log:
+    
     '''A OOP implementation for logging.
     warnings is to tackle the warning option
     verbose is to tackle the verbose option
@@ -138,39 +146,49 @@
         
     def msg(self, msg):
         'Print general messages'
+        
         if self.color:
             WConio.textcolor(15)
+            
         sys.stdout.write(msg)
         sys.stdout.flush()
         
     def err(self, msg):
         'Print messages with an error'
+        
         if self.color:
             WConio.textcolor(4)
+            
         sys.stderr.write(msg)
         sys.stderr.flush()
         
     def success(self, msg):
         'Print messages with a success'
+        
         if self.color:
             WConio.textcolor(2)
+            
         sys.stdout.write(msg)
         sys.stdout.flush()
     
     # For the rest, we need to check the options also
     def warn(self, msg):
         'Print warnings'
+        
         if self.WARN is True:
             if self.color:
                 WConio.textcolor(12)
+                
             sys.stderr.write(msg)
             sys.stderr.flush()
 
     def verbose(self, msg):
         'Print verbose messages'
+        
         if self.VERBOSE is True:
             if self.color:
                 WConio.textcolor(11)
+                
             sys.stdout.write(msg)
             sys.stdout.flush()
             
@@ -200,7 +218,6 @@
         try:
             if self.lock:
                 self.ZipLock.acquire(True)
-            
             filename = zipfile.ZipFile(zip_file_name, "a")
         except IOError:
             #INFO: By design zipfile throws an IOError exception when you open
@@ -209,11 +226,12 @@
         #except:
             #TODO Handle the exception
             #return False
-                    
         filename.write(files_to_compress, files_to_compress, zipfile.ZIP_DEFLATED)                        
         filename.close()
+        
         if self.lock:
             self.ZipLock.release()
+            
         return True
         
     def decompress_the_file(self, archive_file, path, target_file, archive_type):
@@ -238,7 +256,6 @@
                             
             if TarGzipBZ2_Uncomprerssed(read_from, write_to) != True:
                 raise ArchiveError
-            
             write_to.close()
             read_from.close()
             return True
@@ -261,7 +278,6 @@
             
             if TarGzipBZ2_Uncomprerssed(read_from, write_to) != True:
                 raise ArchiveError
-            
             write_to.close()
             read_from.close()
             return True
@@ -275,38 +291,53 @@
                 
             for filename in zip_file.namelist():
                 data = zip_file.read()
-                
             zip_file.close()
             return True
+        
         else:
             return False
 
 
-def FetchBugReportsDebian(PackageName, FileHandle):
+def FetchBugReportsDebian(PackageName, ZipFileName=None, lock=False):
     try:
         import debianbts
     except ImportError:
         return False
     
     bug_list = []
-    file_handle = open(FileHandle, 'w')
+    if ZipFileName is not None:
+        AddToArchive = Archiver(lock)
     
     (num_of_bugs, header, bugs_list) = debianbts.get_reports(PackageName)
 
     if num_of_bugs:
         for x in bugs_list:
             (sub_bugs_header, sub_bugs_list) = x
+            
             if not "Resolved bugs" in sub_bugs_header:
+                
                 for x in sub_bugs_list:
                     break_bugs = x.split(':')
                     bug_num = string.lstrip(break_bugs[0], '#')
                     data = debianbts.get_report(bug_num, followups=True)
+                    FileName = PackageName + "." + bug_num
+                    file_handle = open(FileName, 'w')
                     file_handle.write(data[0] + "\n\n")
+                    
                     for x in data[1]:
                         file_handle.write(x)
                         file_handle.write("\n")
+                        
                     file_handle.write("\n" * 3)
                     file_handle.flush()
+                    file_handle.close()
+                    
+                    if ZipFileName is not None:
+                        AddToArchive.compress_the_file(ZipFileName, FileName)
+                        os.unlink(FileName)
+                        
+        return True
+    return False
     
     
 def files(root): 
@@ -359,6 +390,7 @@
                 i += block_size
                 counter += 1
                 self.updateValue(increment)
+                
             self.completed()
             data.close()
             temp.close()
@@ -467,15 +499,18 @@
         log.err("%s - %s - %s\n" % (filename, errno, errormsg))
         log.verbose(" Will still try with other package uris\n\n")
         pass
+    
     elif errno == 407 or errno == 2:
         # These, I believe are from OSError/IOError exception.
         # I'll document it as soon as I confirm it.
         log.err("%s\n" % (errormsg))
         sys.exit(errno)
+        
     elif errno == 1:
         log.err(errormsg)
         log.err("Explicit program termination %s\n" % (errno))
         sys.exit(errno)
+        
     else:
         log.err("Aieee! I don't understand this errorcode\n" % (errno))
         sys.exit(errno)
@@ -585,15 +620,27 @@
                     PackageName = file.split("_")[0]
                     if cache_dir is None:
                         log.msg("Downloading %s - %d KB\n" % (file, size/1024))
+                        
                         if FetcherInstance.download_from_web(url, file, download_path) != True:
                             errlist.append(PackageName)
+                            
+                            bug_report_fetch_flag = 0
                             if ArgumentOptions.zip_it:
                                 log.success("\n%s done.\n" % (PackageName) )
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
+                                if ArgumentOptions.deb_bugs:
+                                    if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file) is True:
+                                        log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                        bug_report_fetch_flag = 1
+                                    
+                            if ArgumentOptions.deb_bugs and bug_report_fetch_flag != 1:
+                                if FetchBugReportsDebian(PackageName) is True:
+                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                     else:
                         if find_first_match(cache_dir, file, download_path, checksum) == False:
                             log.msg("Downloading %s - %d KB\n" % (PackageName, size/1024))
+                            
                             if FetcherInstance.download_from_web(url, file, download_path) != True:
                                  errlist.append(PackageName)
                             else:
@@ -607,14 +654,31 @@
                                     else:
                                         log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
                                         
+                                bug_report_fetch_flag = 0
                                 if ArgumentOptions.zip_it:
                                     FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                     os.unlink(os.path.join(download_path, file))
+                                    if ArgumentOptions.deb_bugs:
+                                        FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file)
+                                        log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                        bug_report_fetch_flag = 1
+                                        
+                                if ArgumentOptions.deb_bugs and bug_report_fetch_flag != 1:
+                                    if FetchBugReportsDebian(PackageName) is True:
+                                        log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                         elif True:
+                            bug_report_fetch_flag = 0
                             if ArgumentOptions.zip_it:
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
                                 os.unlink(os.path.join(download_path, file))
-                                
+                                if ArgumentOptions.deb_bugs:
+                                    if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file) is True:
+                                        log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                        bug_report_fetch_flag = 1
+                                    
+                            if ArgumentOptions.deb_bugs and bug_report_fetch_flag != 1:
+                                if FetchBugReportsDebian(PackageName) is True:
+                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                 else:
                     raise FetchDataKeyError
                     
@@ -658,7 +722,7 @@
                         log.msg("Downloading %s\n" % (PackageName) ) 
                         
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
-                            log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
+                            log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                     log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
@@ -679,18 +743,25 @@
                                 if ArgumentOptions.zip_it:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
+                                        if ArgumentOptions.deb_bugs:
+                                            if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file, lock=True) is True:
+                                                log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
                                 else:
                                     try:
                                         shutil.copy(full_file_path, download_path)
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
                                     except shutil.Error:
                                         log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
+                                                
+                                    if ArgumentOptions.deb_bugs:
+                                        if FetchBugReportsDebian(PackageName, lock=True) is True:
+                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                         
                             else:
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
                                 log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
-                                    log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
+                                    log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
@@ -702,9 +773,18 @@
                                             log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                             sys.exit(1)
                                         os.unlink(os.path.join(download_path, file) )
+                                        bug_report_fetch_flag = 0
+                                        if ArgumentOptions.deb_bugs:
+                                            if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file, lock=True) is True:
+                                                log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                                bug_report_fetch_flag = 1
+                                                
+                                    if ArgumentOptions.deb_bugs and bug_report_fetch_flag != 1:
+                                        if FetchBugReportsDebian(PackageName, lock=True) is True:
+                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                         
                         else:
-                            #INFO: If md5check is disabled, just copy it.
+                            #INFO: If md5check is disabled, just copy it to the cache_dir
                             try:
                                 shutil.copy(full_file_path, download_path)
                                 log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
@@ -730,13 +810,32 @@
                                             sys.exit(1)
                                         log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                         os.unlink(os.path.join(download_path, file) )
+                                        bug_report_fetch_flag = 0
+                                        if ArgumentOptions.deb_bugs:
+                                            if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file, lock=True) is True:
+                                                log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                                bug_report_fetch_flag = 1
+                                    if ArgumentOptions.deb_bugs:
+                                        if FetchBugReportsDebian(PackageName, lock=True) is True:
+                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                            
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                     log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                     sys.exit(1)
                                 log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                 os.unlink(os.path.join(download_path, file) )
-                            log.success("\r%s done.%s\n" % (PackageName, " "* 40) )
+                                bug_report_fetch_flag = 0
+                                if ArgumentOptions.deb_bugs:
+                                    if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file, lock=True) is True:
+                                        log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                        bug_report_fetch_flag = 1
+                                        
+                            if ArgumentOptions.deb_bugs and bug_report_fetch_flag != 1:
+                                if FetchBugReportsDebian(PackageName, lock=True) is True:
+                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                    
+                            log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
                         else:
                             #log.err("Couldn't find %s\n" % (PackageName) )
                             errlist.append(PackageName)
@@ -916,7 +1015,7 @@
                       help="Install the fetched packages to the  NONET machine and _upgrade_ the packages on the NONET machine. This command must be executed on the NONET machine",
                       action="store", type="string", metavar="pypt-offline-upgrade.zip")
     parser.add_option("", "--fetch-bug-reports", dest="deb_bugs",
-                      help="Fetch bug reports from the BTS", action="store_false")
+                      help="Fetch bug reports from the BTS", action="store_true")
     #global options, args
     (options, args) = parser.parse_args()
     
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-05-19 20:50:08
       | 
| Revision: 146
          http://svn.sourceforge.net/pypt-offline/?rev=146&view=rev
Author:   riteshsarraf
Date:     2007-05-19 13:50:10 -0700 (Sat, 19 May 2007)
Log Message:
-----------
* Not all bug reports should be downloaded.
* I'm wondering if the bug reports classified as Normal are worth downloading ? The bugs that break the system are usually Important, Severe or Grave.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-19 18:03:15 UTC (rev 145)
+++ trunk/pypt_core.py	2007-05-19 20:50:10 UTC (rev 146)
@@ -305,6 +305,10 @@
         return False
     
     bug_list = []
+    bug_types = ["Resolved bugs", "Minor bugs", "Wishlist items"]
+    #INFO: These are the ignore bug types. No one should really be caring about these
+    
+    
     if ZipFileName is not None:
         AddToArchive = Archiver(lock)
     
@@ -314,8 +318,13 @@
         for x in bugs_list:
             (sub_bugs_header, sub_bugs_list) = x
             
-            if not "Resolved bugs" in sub_bugs_header:
-                
+            for BugType in bug_types:
+                if BugType in sub_bugs_header:
+                    bug_flag = 0
+                    break
+                bug_flag = 1
+                    
+            if bug_flag:
                 for x in sub_bugs_list:
                     break_bugs = x.split(':')
                     bug_num = string.lstrip(break_bugs[0], '#')
@@ -921,6 +930,8 @@
             #data.write(file.read(filename))
             #data = file.read(filename)
             
+            # retval = subprocess.call(['less', filename])
+            
             if pypt_magic.file(os.path.abspath(filename)) == "application/x-bzip2":
                 archive.decompress_the_file(os.path.abspath(filename), target_path, filename, 1)
             elif pypt_magic.file(os.path.abspath(filename)) == "application/x-gzip":
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-06-10 14:02:56
       | 
| Revision: 148
          http://svn.sourceforge.net/pypt-offline/?rev=148&view=rev
Author:   riteshsarraf
Date:     2007-06-10 07:02:58 -0700 (Sun, 10 Jun 2007)
Log Message:
-----------
* Minor changes for syncer()
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-05-24 12:26:56 UTC (rev 147)
+++ trunk/pypt_core.py	2007-06-10 14:02:58 UTC (rev 148)
@@ -934,8 +934,13 @@
 def syncer(install_file_path, target_path, arg_type=None):
     '''Syncer does the work of syncing the downloaded files.
     It syncs "install_file_path" which could be a valid file path
-    or a zip archive to "target_path'''
+    or a zip archive to "target_path"
+    arg_type defines whether install_file_path is a zip file
+    or a folder path
     
+    1 => install_file_path is a File
+    2 => install_file_path is a Folder'''
+    
     archive = Archiver()
     if arg_type == 1:
         try:
@@ -982,9 +987,9 @@
             os.unlink(filename)
                 
     elif arg_type == 2:
+        archive_file_types = ['application/x-bzip2', 'application/gzip', 'application/zip']
         for eachfile in os.listdir(install_file_path):
             
-            archive_file_types = ['application/x-bzip2', 'application/gzip', 'application/zip']
             archive_type = None
             try:
                 import pypt_magic
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-06-13 13:41:22
       | 
| Revision: 149
          http://svn.sourceforge.net/pypt-offline/?rev=149&view=rev
Author:   riteshsarraf
Date:     2007-06-13 06:41:24 -0700 (Wed, 13 Jun 2007)
Log Message:
-----------
* Include socket timeout so that we don't wait for eternity. Timeout is 15 seconds
* Many bugs are marked as FIXED. These also need to be ignored from being downloaded
* find_first_match() had a very bad bug where it would always exit on the first iteration. :-( FIXED NOW
* Do a check of cache_dir in the beginning to see if the user has properly input the full path of the cache dir
* Clarified about cache_dir input in the help section of optparse
* When not doing md5check, still we need to fetch bugs and if zip, zip it.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-06-10 14:02:58 UTC (rev 148)
+++ trunk/pypt_core.py	2007-06-13 13:41:24 UTC (rev 149)
@@ -11,7 +11,12 @@
 import array
 
 from array import array
+from socket import setdefaulttimeout
 
+#INFO: Set the default timeout to 15 seconds for the packages that are being downloaded.
+setdefaulttimeout(15)
+
+
 #INFO: They aren't on Windows
 try:
     from fcntl import ioctl
@@ -286,7 +291,7 @@
 
 
 class FetchBugReports(Archiver):
-    def __init__(self, bugTypes=["Resolved bugs", "Normal bugs", "Minor bugs", "Wishlist items"], lock=False, ArchiveFile=None):
+    def __init__(self, bugTypes=["Resolved bugs", "Normal bugs", "Minor bugs", "Wishlist items", "FIXED"], lock=False, ArchiveFile=None):
         
         self.bugsList = []
         self.bugTypes = bugTypes
@@ -367,13 +372,18 @@
     Else Return False'''
 
     # Do the sanity check first
-    if cache_dir is None or filename is None or os.path.isdir(cache_dir) is False:
+    #if cache_dir is None or filename is None or os.path.isdir(cache_dir) is False:
+    if cache_dir is None:
         return False
+    elif filename is None:
+        return False
+    elif os.path.isdir(cache_dir) is False:
+        return False
     else:
         for path, file in files(cache_dir): 
             if file == filename:
                 return os.path.join(path, file)
-            return False
+        return False
         
         
         
@@ -553,6 +563,8 @@
     '''
     
     cache_dir = ArgumentOptions.cache_dir
+    if os.path.isdir(cache_dir) is False:
+        log.verbose("WARNING: cache dir is incorrect. Did you give the full path ?\n")
     
     class FetcherClass(DownloadFromWeb, Archiver, MD5Check):
         def __init__(self, width, lock):
@@ -839,6 +851,24 @@
                                 log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
                             except shutil.Error:
                                 log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
+                                
+                            if ArgumentOptions.deb_bugs:
+                                bug_fetched = 0
+                                if FetchBugReportsDebian.FetchBugsDebian(PackageName):
+                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                    bug_fetched = 1
+                            
+                            file = full_file_path.split("/")
+                            file = file[len(file) - 1]
+                            if ArgumentOptions.zip_it:
+                                if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+                                    log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    sys.exit(1)
+                                os.unlink(os.path.join(download_path, file) )
+                                
+                                if bug_fetched:
+                                    if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
+                                        log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
                     else:
                         log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
                         log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
@@ -1022,7 +1052,7 @@
     parser.add_option("-d","--download-dir", dest="download_dir",
                       help="Root directory path to save the downloaded files", action="store", type="string", metavar="pypt-downloads")
     parser.add_option("-s","--cache-dir", dest="cache_dir",
-                      help="Root directory path where the pre-downloaded files will be searched. If not, give a period '.'",
+                      help="Root directory path where the pre-downloaded files will be searched.Make sure you give the full path of the cache directory. If not, give a period '.'",
                       action="store", type="string", metavar=".")
     parser.add_option("--verbose", dest="verbose", help="Enable verbose messages", action="store_true")
     parser.add_option("-u","--uris", dest="uris_file",
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-24 08:04:12
       | 
| Revision: 151
          http://svn.sourceforge.net/pypt-offline/?rev=151&view=rev
Author:   riteshsarraf
Date:     2007-07-24 01:04:15 -0700 (Tue, 24 Jul 2007)
Log Message:
-----------
Lesson learnt, "Don't fuckin' wait for a commit for long with huge changes. You forget what all these changes are doing."
* Some checks in FetchBugReports
* More checks for cache_dir validity
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-06-29 14:35:45 UTC (rev 150)
+++ trunk/pypt_core.py	2007-07-24 08:04:15 UTC (rev 151)
@@ -1,4 +1,5 @@
 import os
+import md5
 import sys
 import shutil
 import platform
@@ -47,15 +48,15 @@
        
 class MD5Check:
     
-    def md5_string(data):
+    def md5_string(self, data):
         hash = md5.new()
         hash.update(data.read())
         return hash.hexdigest() 
     
-    def md5_check(file, checksum):
+    def md5_check(self, file, checksum):
         data = open(file, 'rb')
         #local = md5_string(data)
-        if checksum == md5_string(data):
+        if checksum == self.md5_string(data):
             return True
         return False
         
@@ -350,9 +351,15 @@
                         file_handle.flush()
                         file_handle.close()
                         
+                        #We're adding to an archive file here.
                         if self.lock:
                             self.AddToArchive(self.ArchiveFile)
-            return True
+            if bug_downloaded:
+                return True
+            else:
+                return False
+        return False
+    #return False
     
     def AddToArchive(self, ArchiveFile):
         if self.compress_the_file(self.ArchiveFile, self.fileName):
@@ -563,8 +570,9 @@
     '''
     
     cache_dir = ArgumentOptions.cache_dir
-    if os.path.isdir(cache_dir) is False:
-        log.verbose("WARNING: cache dir is incorrect. Did you give the full path ?\n")
+    if cache_dir is not None:
+        if os.path.isdir(cache_dir) is False:
+            log.verbose("WARNING: cache dir is incorrect. Did you give the full path ?\n")
     
     class FetcherClass(DownloadFromWeb, Archiver, MD5Check):
         def __init__(self, width, lock):
@@ -786,41 +794,54 @@
                 elif key == 'Upgrade':
                     PackageName = file.split("_")[0]
                     response.put(func(cache_dir, file) ) 
-                    #INFO: find_first_match() returns False of a file name with absolute path
+                    #INFO: find_first_match() returns False or a file name with absolute path
                     full_file_path = response.get()
                     
+                    #INFO: If we find the file in the local cache_dir, we'll execute this block.
                     if full_file_path != False:
+                        
+                        # We'll first check for its md5 checksum
                         if ArgumentOptions.disable_md5check is False:
+                            
                             if FetcherInstance.md5_check(full_file_path, checksum) is True:
+                                log.verbose("md5checksum correct for package %s.\n" % (PackageName) )
+                                
                                 if ArgumentOptions.deb_bugs:
                                     bug_fetched = 0
+                                    log.verbose("Fetching bug reports for package %s.\n" (PackageName) )
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                         log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                         bug_fetched = 1
                                 
                                 if ArgumentOptions.zip_it:
+                                    
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
-                                        
-                                        if ArgumentOptions.deb_bugs:
-                                            if FetchBugReportsDebian(PackageName, ArgumentOptions.zip_upgrade_file, lock=True) is True:
-                                                log.verbose("Fetched bug reports for package %s and archived to file %s.\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                            
+                                #INFO: If no zip option enabled, simply copy the downloaded package file
+                                # along with the downloaded bug reports.
                                 else:
                                     try:
                                         shutil.copy(full_file_path, download_path)
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
                                     except shutil.Error:
                                         log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
-                                                
-                                    if ArgumentOptions.deb_bugs:
-                                        if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                    
+                                    if bug_fetched == 1:
+                                        for x in os.listdir(os.curdir()):
+                                            if x.startswith(PackageName):
+                                                shutil.move(x, download_path)
+                                                log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                         
+                            #INFO: Damn!! The md5chesum didn't match :-(
+                            # The file is corrupted and we need to download a new copy from the internet
                             else:
                                 log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
                                 log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
                                     log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
+                                    
+                                    #Add to cache_dir if possible
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
@@ -828,11 +849,10 @@
                                         except shutil.Error:
                                             log.verbose("Couldn't copy %s  to %s\n\n" % (file, ArgumentOptions.cache_dir) )
                                             
+                                    #Fetch bug reports
                                     if ArgumentOptions.deb_bugs:
-                                        bug_fetched = 0
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                             log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                                            bug_fetched = 1
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
@@ -840,10 +860,8 @@
                                             sys.exit(1)
                                         os.unlink(os.path.join(download_path, file) )
                                         
-                                        if bug_fetched:
-                                            if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
-                                                log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
-                                        
+                        #INFO: You're and idiot.
+                        # You should NOT disable md5checksum for any files
                         else:
                             #INFO: If md5check is disabled, just copy it to the cache_dir
                             try:
@@ -853,28 +871,28 @@
                                 log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
                                 
                             if ArgumentOptions.deb_bugs:
-                                bug_fetched = 0
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                     log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                                    bug_fetched = 1
                             
                             file = full_file_path.split("/")
                             file = file[len(file) - 1]
+                            file = download_path + "/" + file
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                     log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                     sys.exit(1)
                                 os.unlink(os.path.join(download_path, file) )
-                                
-                                if bug_fetched:
-                                    if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
-                                        log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                        
                     else:
+                        #INFO: This block gets executed if the file is not found in local cache_dir or cache_dir is None
+                        # We go ahead and try to download it from the internet
                         log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
                         log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
+                            
+                            #INFO: This block gets executed if md5checksum is allowed
                             if ArgumentOptions.disable_md5check is False:
-                                if FetcherInstance.md5_check(full_file_path, checksum) is True:
+                                if FetcherInstance.md5_check(file, checksum) is True:
                                             
                                     if ArgumentOptions.cache_dir:
                                         try:
@@ -884,10 +902,8 @@
                                             log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, ArgumentOptions.cache_dir) )
                                             
                                     if ArgumentOptions.deb_bugs:
-                                        bug_fetched = 0
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                             log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                                            bug_fetched = 1
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
@@ -895,24 +911,19 @@
                                             sys.exit(1)
                                         log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                         os.unlink(os.path.join(download_path, file) )
+                                            
+                            else:
+                                if ArgumentOptions.deb_bugs:
+                                    if FetchBugReportsDebian.FetchBugsDebian(PackageName):
+                                        log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                         
-                                        if bug_fetched:
-                                            if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
-                                                log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                if ArgumentOptions.zip_it:
+                                    if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+                                        log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        sys.exit(1)
+                                    log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    os.unlink(os.path.join(download_path, file) )
                                             
-                            if ArgumentOptions.deb_bugs:
-                                bug_fetched = 0
-                                if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                                    bug_fetched = 1
-                                    
-                            if ArgumentOptions.zip_it:
-                                if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
-                                    log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
-                                    sys.exit(1)
-                                log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
-                                os.unlink(os.path.join(download_path, file) )
-                                                
                             log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
                         else:
                             #log.err("Couldn't find %s\n" % (PackageName) )
@@ -1058,7 +1069,7 @@
     parser.add_option("-u","--uris", dest="uris_file",
                       help="Full path of the uris file which contains the main database of files to be downloaded",action="store", type="string")
     parser.add_option("","--disable-md5check", dest="disable_md5check",
-                      help="Disable md5checksum validation on downloaded files",action="store_true")
+                      help="Disable md5checksum validation on downloaded files",action="store_false", default=False)
     parser.add_option("", "--threads", dest="num_of_threads", help="Number of threads to spawn",
                       action="store", type="int", metavar="1", default=1)
        
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-24 08:26:20
       | 
| Revision: 152
          http://svn.sourceforge.net/pypt-offline/?rev=152&view=rev
Author:   riteshsarraf
Date:     2007-07-24 01:26:23 -0700 (Tue, 24 Jul 2007)
Log Message:
-----------
* bug_downloaded was being referenced before assignment in some cases. It was useless, just removed it.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-24 08:04:15 UTC (rev 151)
+++ trunk/pypt_core.py	2007-07-24 08:26:23 UTC (rev 152)
@@ -330,7 +330,6 @@
                     bug_flag = 1
                         
                 if bug_flag:
-                    bug_downloaded = True
                     
                     for x in sub_bugs_list:
                         break_bugs = x.split(':')
@@ -354,7 +353,7 @@
                         #We're adding to an archive file here.
                         if self.lock:
                             self.AddToArchive(self.ArchiveFile)
-            if bug_downloaded:
+            if bug_flag:
                 return True
             else:
                 return False
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 13:08:13
       | 
| Revision: 154
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=154&view=rev
Author:   riteshsarraf
Date:     2007-07-27 06:08:16 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
* Implement class Log to be threaded
* Also set back the original color once the color for the msg type is displayed.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-24 11:58:53 UTC (rev 153)
+++ trunk/pypt_core.py	2007-07-27 13:08:16 UTC (rev 154)
@@ -138,7 +138,7 @@
     Light Cyan = 11
     '''
     
-    def __init__(self, verbose, color = None):
+    def __init__(self, verbose, color = None, lock = None):
         
         if verbose is True:
             self.VERBOSE = True
@@ -146,44 +146,77 @@
         
         self.color = color
         
+        if lock is None or lock != 1:
+            self.DispLock = False
+        else:
+            self.DispLock = threading.Lock()
+            self.lock = True
+        
     def msg(self, msg):
         'Print general messages'
         
+        if self.lock:
+            self.DispLock.acquire(True)
+            
         if self.color:
             WConio.textcolor(15)
             
         sys.stdout.write(msg)
         sys.stdout.flush()
         
+        if self.lock:
+            self.DispLock.release()
+        
     def err(self, msg):
         'Print messages with an error'
         
+        if self.lock:
+            self.DispLock.acquire(True)
+            
         if self.color:
             WConio.textcolor(4)
             
         sys.stderr.write(msg)
         sys.stderr.flush()
+        WConio.textcolor(15) #Once the error is displayed, change back to the normal color
         
+        if self.lock:
+            self.DispLock.release()
+        
     def success(self, msg):
         'Print messages with a success'
         
+        if self.lock:
+            self.DispLock.acquire(True)
+            
         if self.color:
             WConio.textcolor(2)
             
         sys.stdout.write(msg)
         sys.stdout.flush()
+        WConio.textcolor(15) #Once the error is displayed, change back to the normal color
+        
+        if self.lock:
+            self.DispLock.release()
     
     # For the rest, we need to check the options also
 
     def verbose(self, msg):
         'Print verbose messages'
         
+        if self.lock:
+            self.DispLock.acquire(True)
+            
         if self.VERBOSE is True:
             if self.color:
                 WConio.textcolor(11)
                 
             sys.stdout.write(msg)
             sys.stdout.flush()
+            WConio.textcolor(15) #Once the error is displayed, change back to the normal color
+        
+        if self.lock:
+            self.DispLock.release()
             
 class Archiver:
     def __init__(self, lock=None):
@@ -1117,7 +1150,7 @@
         # The log implementation
         # Instantiate the class
         global log
-        log = Log(options.verbose, WindowColor)
+        log = Log(options.verbose, WindowColor, lock = True)
         
         log.msg("pypt-offline %s\n" % (version))
         log.msg("Copyright %s\n" % (copyright))
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 13:09:23
       | 
| Revision: 155
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=155&view=rev
Author:   riteshsarraf
Date:     2007-07-27 06:09:25 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
* Minor error. There is no variable called ziplock
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 13:08:16 UTC (rev 154)
+++ trunk/pypt_core.py	2007-07-27 13:09:25 UTC (rev 155)
@@ -221,7 +221,7 @@
 class Archiver:
     def __init__(self, lock=None):
         if lock is None or lock != 1:
-            self.ziplock = False
+            self.ZipLock = False
         else:
             self.ZipLock = threading.Lock()
             self.lock = True
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 19:03:48
       | 
| Revision: 156
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=156&view=rev
Author:   riteshsarraf
Date:     2007-07-27 12:03:50 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
* Enhanced socket handling
* Increased the socket timeout value to 30 secs
* Handle socket timeout exceptions when fetching data from the network
* Made WConio usage conditional because it is used primarily on Windows when the module is available.
* Fixed the logic for when to download bug reports and how to handle them.
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 13:09:25 UTC (rev 155)
+++ trunk/pypt_core.py	2007-07-27 19:03:50 UTC (rev 156)
@@ -10,12 +10,12 @@
 import signal
 import optparse
 import array
+import socket
 
 from array import array
-from socket import setdefaulttimeout
 
 #INFO: Set the default timeout to 15 seconds for the packages that are being downloaded.
-setdefaulttimeout(15)
+socket.setdefaulttimeout(30)
 
 
 #INFO: They aren't on Windows
@@ -178,7 +178,9 @@
             
         sys.stderr.write(msg)
         sys.stderr.flush()
-        WConio.textcolor(15) #Once the error is displayed, change back to the normal color
+
+	if self.color:
+	    WConio.textcolor(15) #Once the error is displayed, change back to the normal color
         
         if self.lock:
             self.DispLock.release()
@@ -194,7 +196,9 @@
             
         sys.stdout.write(msg)
         sys.stdout.flush()
-        WConio.textcolor(15) #Once the error is displayed, change back to the normal color
+
+	if self.color:
+	    WConio.textcolor(15) #Once the error is displayed, change back to the normal color
         
         if self.lock:
             self.DispLock.release()
@@ -213,7 +217,8 @@
                 
             sys.stdout.write(msg)
             sys.stdout.flush()
-            WConio.textcolor(15) #Once the error is displayed, change back to the normal color
+	    if self.color:
+		WConio.textcolor(15) #Once the error is displayed, change back to the normal color
         
         if self.lock:
             self.DispLock.release()
@@ -350,7 +355,10 @@
             except IOError:
                 sys.exit(1)
         
-        (num_of_bugs, header, self.bugs_list) = debianbts.get_reports(PackageName)
+	try:
+	    (num_of_bugs, header, self.bugs_list) = debianbts.get_reports(PackageName)
+	except socket.timeout:
+	    return False
         
         if num_of_bugs:
             for x in self.bugs_list:
@@ -367,7 +375,10 @@
                     for x in sub_bugs_list:
                         break_bugs = x.split(':')
                         bug_num = string.lstrip(break_bugs[0], '#')
-                        data = debianbts.get_report(bug_num, followups=True)
+			try:
+			    data = debianbts.get_report(bug_num, followups=True)
+			except socket.timeout:
+			    return False
                         if Filename == None:
                             self.fileName = PackageName + "." + bug_num
                             file_handle = open(self.fileName, 'w')
@@ -499,6 +510,9 @@
             if hasattr(e, 'code') and hasattr(e, 'reason'):
                 errfunc(e.code, e.reason, file)
 
+	except socket.timeout:
+	    errfunc(101010, "Socket timeout.", file)
+
 def copy_first_match(cache_dir, filename, dest_dir, checksum): # aka new_walk_tree_copy() 
     '''Walks into "reposiotry" looking for "filename".
     If found, copies it to "dest_dir" but first verifies their md5 "checksum".'''
@@ -564,11 +578,13 @@
     This function does the job of behaving accordingly
     as per the error codes.
     '''
-    error_codes = [-3, 13, 504, 404, 10060, 104]
+    error_codes = [-3, 13, 504, 404, 10060, 104, 101010]
     # 104, 'Connection reset by peer'
     # 504 is for gateway timeout
     # 404 is for URL error. Page not found.
     # 10060 is for Operation Time out. There can be multiple reasons for this timeout
+    # 101010 - Dummy error code for socket timeouts. FIXME: Find the
+    # 		correct socket timeout error code
     
     #TODO: Find out what these error codes are for
     # and better document them the next time you find it out.
@@ -766,11 +782,16 @@
                                     
                             if ArgumentOptions.zip_it:
                                 FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+                                log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                 os.unlink(os.path.join(download_path, file))
+                            else:
+                                #Copy the bug report to the target download_path folder
+                                if bug_fetched == 1:
+                                    for x in os.listdir(os.curdir()):
+                                        if x.startswith(PackageName):
+                                            shutil.move(x, download_path)
+                                            log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                 
-                                if bug_fetched:
-                                    if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
-                                        log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
                 else:
                     raise FetchDataKeyError
                     
@@ -895,25 +916,35 @@
                         #INFO: You're and idiot.
                         # You should NOT disable md5checksum for any files
                         else:
-                            #INFO: If md5check is disabled, just copy it to the cache_dir
-                            try:
-                                shutil.copy(full_file_path, download_path)
-                                log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
-                            except shutil.Error:
-                                log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
-                                
                             if ArgumentOptions.deb_bugs:
+                                bug_fetched = 0
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                     log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                            
-                            file = full_file_path.split("/")
-                            file = file[len(file) - 1]
-                            file = download_path + "/" + file
+                                    bug_fetched = 1
+                                    
+                            #FIXME: Don't know why this was really required. If this has no changes, delete it.
+                            #file = full_file_path.split("/")
+                            #file = file[len(file) - 1]
+                            #file = download_path + "/" + file
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                     log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                     sys.exit(1)
                                 os.unlink(os.path.join(download_path, file) )
+                            else:
+                                # Since zip file option is not enabled let's copy the file to the target folder
+                                try:
+                                    shutil.copy(full_file_path, download_path)
+                                    log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                except shutil.Error:
+                                    log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
+                                    
+                                # And also the bug reports
+                                if bug_fetched == 1:
+                                    for x in os.listdir(os.curdir()):
+                                        if x.startswith(PackageName):
+                                            shutil.move(x, download_path)
+                                            log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                         
                     else:
                         #INFO: This block gets executed if the file is not found in local cache_dir or cache_dir is None
@@ -960,7 +991,8 @@
                         else:
                             #log.err("Couldn't find %s\n" % (PackageName) )
                             errlist.append(PackageName)
-                    
+                else:
+                    raise FetchDataKeyError
         # Create two Queues for the requests and responses
         requestQueue = Queue.Queue()
         responseQueue = Queue.Queue()
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 19:39:07
       | 
| Revision: 157
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=157&view=rev
Author:   riteshsarraf
Date:     2007-07-27 12:39:09 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
* Added more verbosity to messages
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 19:03:50 UTC (rev 156)
+++ trunk/pypt_core.py	2007-07-27 19:39:09 UTC (rev 157)
@@ -732,6 +732,8 @@
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                     log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                     bug_fetched = 1
+                                else:
+                                    log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                             
                             if ArgumentOptions.zip_it:
                                 log.success("\n%s done.\n" % (PackageName) )
@@ -760,18 +762,18 @@
                                         log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
                                         
                                 if ArgumentOptions.deb_bugs:
-                                    bug_fetched = 0
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                         log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
-                                        bug_fetched = 1
+                                    else:
+                                        log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                         
                                 if ArgumentOptions.zip_it:
-                                    FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
-                                    os.unlink(os.path.join(download_path, file))
-                                    
-                                    if bug_fetched:
-                                            if FetchBugReportsDebian.AddToArchive(ArgumentOptions.zip_upgrade_file):
-                                                log.verbose("Archived bug reports for package %s to archive %s\n" % (PackageName, ArgumentOptions.zip_upgrade_file) )
+                                    if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+                                        log.err("Couldn't add %s to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        sys.exit(1)
+                                    else:
+                                        log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        os.unlink(os.path.join(download_path, file))
                                         
                         elif True:
                             if ArgumentOptions.deb_bugs:
@@ -779,11 +781,16 @@
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                     log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                     bug_fetched = 1
+                                else:
+                                    log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                     
                             if ArgumentOptions.zip_it:
-                                FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
-                                log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
-                                os.unlink(os.path.join(download_path, file))
+                                if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+                                    log.err("Couldn't add %s to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    sys.exit(1)
+                                else:
+                                    log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    os.unlink(os.path.join(download_path, file))
                             else:
                                 #Copy the bug report to the target download_path folder
                                 if bug_fetched == 1:
@@ -840,7 +847,9 @@
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
                                     log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
                                     sys.exit(1)
-                                os.unlink(os.path.join(download_path, file) )
+                                else:
+                                    log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
+                                    os.unlink(os.path.join(download_path, file) )
                         else:
                             errlist.append(file)
                                 
@@ -865,11 +874,16 @@
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                         log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                         bug_fetched = 1
+                                    else:
+                                        log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                 
                                 if ArgumentOptions.zip_it:
                                     
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
                                         log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
+                                    else:
+                                        log.err("Couldn't add %s to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        sys.exit(1)
                                             
                                 #INFO: If no zip option enabled, simply copy the downloaded package file
                                 # along with the downloaded bug reports.
@@ -906,12 +920,16 @@
                                     if ArgumentOptions.deb_bugs:
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                             log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                        else:
+                                            log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                             log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                             sys.exit(1)
-                                        os.unlink(os.path.join(download_path, file) )
+                                        else:
+                                            log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            os.unlink(os.path.join(download_path, file) )
                                         
                         #INFO: You're and idiot.
                         # You should NOT disable md5checksum for any files
@@ -921,6 +939,8 @@
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                     log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
                                     bug_fetched = 1
+                                else:
+                                    log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                     
                             #FIXME: Don't know why this was really required. If this has no changes, delete it.
                             #file = full_file_path.split("/")
@@ -930,7 +950,9 @@
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                     log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                     sys.exit(1)
-                                os.unlink(os.path.join(download_path, file) )
+                                else:
+                                    log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    os.unlink(os.path.join(download_path, file) )
                             else:
                                 # Since zip file option is not enabled let's copy the file to the target folder
                                 try:
@@ -967,25 +989,31 @@
                                     if ArgumentOptions.deb_bugs:
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                             log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                        else:
+                                            log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                             log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                             sys.exit(1)
-                                        log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
-                                        os.unlink(os.path.join(download_path, file) )
+                                        else:
+                                            log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            os.unlink(os.path.join(download_path, file) )
                                             
                             else:
                                 if ArgumentOptions.deb_bugs:
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
                                         log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                    else:
+                                        log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
                                         
                                 if ArgumentOptions.zip_it:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
                                         log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
                                         sys.exit(1)
-                                    log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
-                                    os.unlink(os.path.join(download_path, file) )
+                                    else:
+                                        log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        os.unlink(os.path.join(download_path, file) )
                                             
                             log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
                         else:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 20:21:16
       | 
| Revision: 158
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=158&view=rev
Author:   riteshsarraf
Date:     2007-07-27 13:21:19 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
vim v/s Eclipse Editor
Both screw up indentation. Why can't they understand each other?
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 19:39:09 UTC (rev 157)
+++ trunk/pypt_core.py	2007-07-27 20:21:19 UTC (rev 158)
@@ -375,10 +375,10 @@
                     for x in sub_bugs_list:
                         break_bugs = x.split(':')
                         bug_num = string.lstrip(break_bugs[0], '#')
-			try:
-			    data = debianbts.get_report(bug_num, followups=True)
-			except socket.timeout:
-			    return False
+                        try:
+                            data = debianbts.get_report(bug_num, followups=True)
+                        except socket.timeout:
+                            return False
                         if Filename == None:
                             self.fileName = PackageName + "." + bug_num
                             file_handle = open(self.fileName, 'w')
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 20:57:35
       | 
| Revision: 159
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=159&view=rev
Author:   riteshsarraf
Date:     2007-07-27 13:57:31 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
* Introduce a global variable, pypt_bug_file_format. This variable contains the suffix string used in the filenames of downloaded bug reports
* We should return True in FetchBugReports when at least one bug report is downloaded.
* Threads look stable now. :-) --- Changed the warning message from one about the "limitation of the number of threads" to one about "the amount of available bandwidth"
* The check for bug report files should be done by verifying that the filename starts with the package name and ends with the pypt_bug_file_format string variable
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 20:21:19 UTC (rev 158)
+++ trunk/pypt_core.py	2007-07-27 20:57:31 UTC (rev 159)
@@ -41,6 +41,7 @@
 supported_platforms = ["Linux", "GNU/kFreeBSD", "GNU"]
 apt_update_target_path = '/var/lib/apt/lists/'
 apt_package_target_path = '/var/cache/apt/archives/'
+pypt_bug_file_format = "__pypt__bug__report"
 # Dummy paths while testing on Windows
 #apt_update_target_path = 'C:\\temp'
 #apt_package_target_path = 'C:\\temp'
@@ -330,11 +331,12 @@
 
 
 class FetchBugReports(Archiver):
-    def __init__(self, bugTypes=["Resolved bugs", "Normal bugs", "Minor bugs", "Wishlist items", "FIXED"], lock=False, ArchiveFile=None):
+    def __init__(self, pypt_bug_file_format, bugTypes=["Resolved bugs", "Normal bugs", "Minor bugs", "Wishlist items", "FIXED"], lock=False, ArchiveFile=None):
         
         self.bugsList = []
         self.bugTypes = bugTypes
         self.lock = lock
+        self.pypt_bug = pypt_bug_file_format
         
         if self.lock:
             Archiver.__init__(self, lock)
@@ -361,9 +363,12 @@
 	    return False
         
         if num_of_bugs:
+            atleast_one_bug_report_downloaded = False
             for x in self.bugs_list:
                 (sub_bugs_header, sub_bugs_list) = x
                 
+                #INFO: We filter all the bugTypes that we think aren't necessary.
+                # We don't download those low priority bug reports
                 for BugType in self.bugTypes:
                     if BugType in sub_bugs_header:
                         bug_flag = 0
@@ -380,7 +385,7 @@
                         except socket.timeout:
                             return False
                         if Filename == None:
-                            self.fileName = PackageName + "." + bug_num
+                            self.fileName = PackageName + "." + bug_num + "." + self.pypt_bug
                             file_handle = open(self.fileName, 'w')
                         else:
                             file_handle = open(Filename, 'a')
@@ -397,7 +402,9 @@
                         #We're adding to an archive file here.
                         if self.lock:
                             self.AddToArchive(self.ArchiveFile)
-            if bug_flag:
+                        
+                        atleast_one_bug_report_downloaded = True
+            if atleast_one_bug_report_downloaded:
                 return True
             else:
                 return False
@@ -637,9 +644,9 @@
     
     if ArgumentOptions.deb_bugs:
         if ArgumentOptions.zip_it:
-            FetchBugReportsDebian = FetchBugReports(lock=True, ArchiveFile=ArgumentOptions.zip_upgrade_file)
+            FetchBugReportsDebian = FetchBugReports(pypt_bug_file_format, ArgumentOptions.zip_upgrade_file, lock=True)
         else:
-            FetchBugReportsDebian = FetchBugReports()
+            FetchBugReportsDebian = FetchBugReports(pypt_bug_file_format)
     
     if ArgumentOptions.download_dir is None:
         if os.access("pypt-downloads", os.W_OK) is True:
@@ -794,8 +801,8 @@
                             else:
                                 #Copy the bug report to the target download_path folder
                                 if bug_fetched == 1:
-                                    for x in os.listdir(os.curdir()):
-                                        if x.startswith(PackageName):
+                                    for x in os.listdir(os.curdir):
+                                        if (x.startswith(PackageName) and x.endswith(pypt_bug_file_format) ):
                                             shutil.move(x, download_path)
                                             log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                 
@@ -805,7 +812,8 @@
     else:
         #INFO: Thread Support
         if ArgumentOptions.num_of_threads > 1:
-            log.msg("WARNING: Threads is still in beta stage. It's better to use just a single thread at the moment.\n\n")
+            log.msg("WARNING: If you are on a slow connection, it is good to limit the number of threads to a low number like 2.\n")
+            log.msg("WARNING: Else higher number of threads executed could cause network congestion and timeouts.\n\n")
             
         def run(request, response, func=find_first_match):
             '''Get items from the request Queue, process them
@@ -895,8 +903,8 @@
                                         log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
                                     
                                     if bug_fetched == 1:
-                                        for x in os.listdir(os.curdir()):
-                                            if x.startswith(PackageName):
+                                        for x in os.listdir(os.curdir):
+                                            if (x.startswith(PackageName) and x.endswith(pypt_bug_file_format) ):
                                                 shutil.move(x, download_path)
                                                 log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                         
@@ -963,8 +971,8 @@
                                     
                                 # And also the bug reports
                                 if bug_fetched == 1:
-                                    for x in os.listdir(os.curdir()):
-                                        if x.startswith(PackageName):
+                                    for x in os.listdir(os.curdir):
+                                        if (x.startswith(PackageName) and x.endswith(pypt_bug_file_format) ):
                                             shutil.move(x, download_path)
                                             log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
                                         
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-27 21:13:18
       | 
| Revision: 160
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=160&view=rev
Author:   riteshsarraf
Date:     2007-07-27 14:13:20 -0700 (Fri, 27 Jul 2007)
Log Message:
-----------
Display the warning when the number of threads is greater than 2
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 20:57:31 UTC (rev 159)
+++ trunk/pypt_core.py	2007-07-27 21:13:20 UTC (rev 160)
@@ -811,7 +811,7 @@
                     
     else:
         #INFO: Thread Support
-        if ArgumentOptions.num_of_threads > 1:
+        if ArgumentOptions.num_of_threads > 2:
             log.msg("WARNING: If you are on a slow connection, it is good to limit the number of threads to a low number like 2.\n")
             log.msg("WARNING: Else higher number of threads executed could cause network congestion and timeouts.\n\n")
             
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 | 
| 
      
      
      From: <rit...@us...> - 2007-07-29 13:38:18
       | 
| Revision: 161
          http://pypt-offline.svn.sourceforge.net/pypt-offline/?rev=161&view=rev
Author:   riteshsarraf
Date:     2007-07-29 06:38:20 -0700 (Sun, 29 Jul 2007)
Log Message:
-----------
* We need spaces to overwrite the leftover characters that remain on screen while the progress bar is running. Ugly, but that's how it currently is :-(
Modified Paths:
--------------
    trunk/pypt_core.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py	2007-07-27 21:13:20 UTC (rev 160)
+++ trunk/pypt_core.py	2007-07-29 13:38:20 UTC (rev 161)
@@ -41,12 +41,17 @@
 supported_platforms = ["Linux", "GNU/kFreeBSD", "GNU"]
 apt_update_target_path = '/var/lib/apt/lists/'
 apt_package_target_path = '/var/cache/apt/archives/'
-pypt_bug_file_format = "__pypt__bug__report"
 # Dummy paths while testing on Windows
 #apt_update_target_path = 'C:\\temp'
 #apt_package_target_path = 'C:\\temp'
+
+pypt_bug_file_format = "__pypt__bug__report"
+
+#These are spaces which will overwrite the progressbar left mess
+LINE_OVERWRITE_MID = " " * 30
+LINE_OVERWRITE_FULL = " " * 60
+LINE_OVERWRITE_SMALL = " " * 15
        
-       
 class MD5Check:
     
     def md5_string(self, data):
@@ -597,7 +602,7 @@
     # and better document them the next time you find it out.
     # 13 is for "Permission Denied" when you don't have privileges to access the destination 
     if errno in error_codes:
-        log.err("%s - %s - %s\n" % (filename, errno, errormsg))
+        log.err("%s - %s - %s.%s\n" % (filename, errno, errormsg, LINE_OVERWRITE_MID))
         log.verbose(" Will still try with other package uris\n\n")
         pass
     
@@ -850,13 +855,13 @@
                         log.msg("Downloading %s\n" % (PackageName) ) 
                         
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
-                            log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
+                            log.success("\r%s done.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
-                                    log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
+                                    log.err("Couldn't archive %s to file %s.%s\n" % (file, ArgumentOptions.zip_update_file, LINE_OVERWRITE_MID) )
                                     sys.exit(1)
                                 else:
-                                    log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
+                                    log.verbose("%s added to archive %s.%s\n" % (file, ArgumentOptions.zip_update_file, LINE_OVERWRITE_FULL) )
                                     os.unlink(os.path.join(download_path, file) )
                         else:
                             errlist.append(file)
@@ -874,23 +879,23 @@
                         if ArgumentOptions.disable_md5check is False:
                             
                             if FetcherInstance.md5_check(full_file_path, checksum) is True:
-                                log.verbose("md5checksum correct for package %s.\n" % (PackageName) )
+                                log.verbose("md5checksum correct for package %s.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                                 
                                 if ArgumentOptions.deb_bugs:
                                     bug_fetched = 0
-                                    log.verbose("Fetching bug reports for package %s.\n" % (PackageName) )
+                                    log.verbose("Fetching bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                        log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                        log.verbose("Fetched bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                                         bug_fetched = 1
                                     else:
-                                        log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
+                                        log.verbose("Couldn't fetch bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                 
                                 if ArgumentOptions.zip_it:
                                     
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
-                                        log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
+                                        log.success("%s copied from local cache directory %s.%s\n" % (PackageName, cache_dir, LINE_OVERWRITE_MID) )
                                     else:
-                                        log.err("Couldn't add %s to archive %s.\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        log.err("Couldn't add %s to archive %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_MID) )
                                         sys.exit(1)
                                             
                                 #INFO: If no zip option enabled, simply copy the downloaded package file
@@ -898,45 +903,45 @@
                                 else:
                                     try:
                                         shutil.copy(full_file_path, download_path)
-                                        log.success("%s copied from local cache directory %s\n" % (PackageName, cache_dir) )
+                                        log.success("%s copied from local cache directory %s.%s\n" % (PackageName, cache_dir, LINE_OVERWRITE_MID) )
                                     except shutil.Error:
-                                        log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
+                                        log.verbose("%s already available in %s. Skipping copy!!!%s\n\n" % (file, download_path, LINE_OVERWRITE_MID) )
                                     
                                     if bug_fetched == 1:
                                         for x in os.listdir(os.curdir):
                                             if (x.startswith(PackageName) and x.endswith(pypt_bug_file_format) ):
                                                 shutil.move(x, download_path)
-                                                log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
+                                                log.verbose("Moved %s file to %s folder.%s\n" % (x, download_path, LINE_OVERWRITE_FULL) )
                                         
                             #INFO: Damn!! The md5chesum didn't match :-(
                             # The file is corrupted and we need to download a new copy from the internet
                             else:
-                                log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
-                                log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
+                                log.verbose("%s MD5 checksum mismatch. Skipping file.%s\n" % (file, LINE_OVERWRITE_FULL) )
+                                log.msg("Downloading %s - %d KB%s\n" % (PackageName, download_size/1024, LINE_OVERWRITE_FULL) )
                                 if FetcherInstance.download_from_web(url, file, download_path) == True:
-                                    log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
+                                    log.success("\r%s done.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                                     
                                     #Add to cache_dir if possible
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, cache_dir)
-                                            log.verbose("%s copied to local cache directory %s\n" % (file, ArgumentOptions.cache_dir) )
+                                            log.verbose("%s copied to local cache directory %s.%s\n" % (file, ArgumentOptions.cache_dir, LINE_OVERWRITE_MID) )
                                         except shutil.Error:
-                                            log.verbose("Couldn't copy %s  to %s\n\n" % (file, ArgumentOptions.cache_dir) )
+                                            log.verbose("Couldn't copy %s to %s.%s\n\n" % (file, ArgumentOptions.cache_dir, LINE_OVERWRITE_FULL) )
                                             
                                     #Fetch bug reports
                                     if ArgumentOptions.deb_bugs:
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                            log.verbose("Fetched bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                         else:
-                                            log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
+                                            log.verbose("Couldn't fetch bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
-                                            log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            log.err("Couldn't archive %s to file %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                             sys.exit(1)
                                         else:
-                                            log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            log.verbose("%s added to archive %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                             os.unlink(os.path.join(download_path, file) )
                                         
                         #INFO: You're and idiot.
@@ -945,10 +950,10 @@
                             if ArgumentOptions.deb_bugs:
                                 bug_fetched = 0
                                 if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                    log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                    log.verbose("Fetched bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                     bug_fetched = 1
                                 else:
-                                    log.verbose("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
+                                    log.verbose("Couldn't fetch bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                     
                             #FIXME: Don't know why this was really required. If this has no changes, delete it.
                             #file = full_file_path.split("/")
@@ -956,31 +961,31 @@
                             #file = download_path + "/" + file
                             if ArgumentOptions.zip_it:
                                 if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
-                                    log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    log.err("Couldn't archive %s to file %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                     sys.exit(1)
                                 else:
-                                    log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                    log.verbose("%s added to archive %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                     os.unlink(os.path.join(download_path, file) )
                             else:
                                 # Since zip file option is not enabled let's copy the file to the target folder
                                 try:
                                     shutil.copy(full_file_path, download_path)
-                                    log.success("%s copied from local cache directory %s\n" % (file, cache_dir) )
+                                    log.success("%s copied from local cache directory %s.%s\n" % (file, cache_dir, LINE_OVERWRITE_SMALL) )
                                 except shutil.Error:
-                                    log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
+                                    log.verbose("%s already available in dest_dir. Skipping copy!!!%s\n\n" % (file, LINE_OVERWRITE_SMALL) )
                                     
                                 # And also the bug reports
                                 if bug_fetched == 1:
                                     for x in os.listdir(os.curdir):
                                         if (x.startswith(PackageName) and x.endswith(pypt_bug_file_format) ):
                                             shutil.move(x, download_path)
-                                            log.verbose("Moved %s file to %s folder.\n" % (x, download_path) )
+                                            log.verbose("Moved %s file to %s folder.%s\n" % (x, download_path, LINE_OVERWRITE_MID) )
                                         
                     else:
                         #INFO: This block gets executed if the file is not found in local cache_dir or cache_dir is None
                         # We go ahead and try to download it from the internet
-                        log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
-                        log.msg("Downloading %s - %d KB\n" % (PackageName, download_size/1024) )
+                        log.verbose("%s not available in local cache %s.%s\n" % (file, ArgumentOptions.cache_dir, LINE_OVERWRITE_MID) )
+                        log.msg("Downloading %s - %d KB%s\n" % (PackageName, download_size/1024, LINE_OVERWRITE_FULL) )
                         if FetcherInstance.download_from_web(url, file, download_path) == True:
                             
                             #INFO: This block gets executed if md5checksum is allowed
@@ -990,40 +995,40 @@
                                     if ArgumentOptions.cache_dir:
                                         try:
                                             shutil.copy(file, ArgumentOptions.cache_dir)
-                                            log.verbose("%s copied to local cache directory %s\n" % (file, ArgumentOptions.cache_dir) )
+                                            log.verbose("%s copied to local cache directory %s.%s\n" % (file, ArgumentOptions.cache_dir, LINE_OVERWRITE_MID) )
                                         except shutil.Error:
-                                            log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, ArgumentOptions.cache_dir) )
+                                            log.verbose("%s already available in %s. Skipping copy!!!%s\n\n" % (file, ArgumentOptions.cache_dir, LINE_OVERWRITE_MID) )
                                             
                                     if ArgumentOptions.deb_bugs:
                                         if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                            log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                            log.verbose("Fetched bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                         else:
-                                            log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
+                                            log.verbose("Couldn't fetch bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                             
                                     if ArgumentOptions.zip_it:
                                         if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
-                                            log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            log.err("Couldn't archive %s to file %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                             sys.exit(1)
                                         else:
-                                            log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                            log.verbose("%s added to archive %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                             os.unlink(os.path.join(download_path, file) )
                                             
                             else:
                                 if ArgumentOptions.deb_bugs:
                                     if FetchBugReportsDebian.FetchBugsDebian(PackageName):
-                                        log.verbose("Fetched bug reports for package %s.\n" % (PackageName) )
+                                        log.verbose("Fetched bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                     else:
-                                        log.err("Couldn't fetch bug reports for package %s.\n" % (PackageName) )
+                                        log.verbose("Couldn't fetch bug reports for package %s.%s\n" % (PackageName, LINE_OVERWRITE_MID) )
                                         
                                 if ArgumentOptions.zip_it:
                                     if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
-                                        log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        log.err("Couldn't archive %s to file %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                         sys.exit(1)
                                     else:
-                                        log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+                                        log.verbose("%s added to archive %s.%s\n" % (file, ArgumentOptions.zip_upgrade_file, LINE_OVERWRITE_SMALL) )
                                         os.unlink(os.path.join(download_path, file) )
                                             
-                            log.success("\r%s done.%s\n" % (PackageName, " "* 60) )
+                            log.success("\r%s done.%s\n" % (PackageName, LINE_OVERWRITE_FULL) )
                         else:
                             #log.err("Couldn't find %s\n" % (PackageName) )
                             errlist.append(PackageName)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
 |