[Pypt-offline-general] SF.net SVN: pypt-offline: [127] trunk
From: <rit...@us...> - 2007-03-12 13:45:51
Revision: 127
http://svn.sourceforge.net/pypt-offline/?rev=127&view=rev
Author: riteshsarraf
Date: 2007-03-12 06:45:51 -0700 (Mon, 12 Mar 2007)
Log Message:
-----------
* This is again a heavy re-write of the existing code.
* Lots of things have been re-organized.
* fetcher() has been completely re-written.
* Now supports simultaneous downloads of both update and upgrade files (see the sketch below)
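The rewritten fetcher() hands (key, item) work units to a pool of worker threads
through a pair of request/response Queues. A minimal sketch of that pattern in
Python 2, to match this revision; fetch() here is only a stand-in for the real
download_from_web()/find_first_match() calls:

    import Queue
    import threading

    def fetch(key, item):
        # Stand-in for the real work: download the file or look it up in the cache.
        return True

    def run(request, response):
        # Worker loop: process (key, item) tuples until a None sentinel arrives.
        while 1:
            tuple_item_key = request.get()
            if tuple_item_key is None:
                break
            (key, item) = tuple_item_key
            response.put((key, item, fetch(key, item)))

    requestQueue = Queue.Queue()
    responseQueue = Queue.Queue()

    thread_pool = [threading.Thread(target=run, args=(requestQueue, responseQueue))
                   for i in range(2)]
    for t in thread_pool: t.start()

    # Queueing both kinds of work is what lets update and upgrade files
    # download simultaneously.
    for key in ('Update', 'Upgrade'):
        requestQueue.put((key, 'some work item'))

    for t in thread_pool: requestQueue.put(None)    # one sentinel per thread
    for t in thread_pool: t.join()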
Modified Paths:
--------------
trunk/pypt_core.py
Removed Paths:
-------------
trunk/pypt_logger.py
trunk/pypt_md5_check.py
trunk/pypt_progressbar.py
trunk/pypt_variables.py
Modified: trunk/pypt_core.py
===================================================================
--- trunk/pypt_core.py 2007-02-11 09:43:21 UTC (rev 126)
+++ trunk/pypt_core.py 2007-03-12 13:45:51 UTC (rev 127)
@@ -1,111 +1,281 @@
-import os, shutil, string, sys, urllib2, Queue, threading, platform
-import pypt_progressbar, pypt_md5_check, pypt_variables, pypt_logger, pypt_progressbar
+import os
+import sys
+import shutil
+import platform
+import string
+import urllib2
+import Queue
+import threading
+import signal
+import optparse
+from array import array
+
+#INFO: fcntl and termios aren't available on Windows
+try:
+ from fcntl import ioctl
+ import termios
+except ImportError:
+ pass
+
'''This is the core module. It does the main job of downloading packages/update packages,\nfiguring out if the packages are in the local cache, handling exceptions and much more'''
-def compress_the_file(zip_file_name, files_to_compress, download_dir):
- '''Condenses all the files into one single file for easy transfer'''
+
+version = "0.6.4"
+copyright = "(C) 2005 - 2007 Ritesh Raj Sarraf - RESEARCHUT (http://www.researchut.com/)"
+
+errlist = []
+supported_platforms = ["Linux", "GNU/kFreeBSD", "GNU"]
+apt_update_target_path = '/var/lib/apt/lists/'
+apt_package_target_path = '/var/cache/apt/archives/'
+# Dummy paths while testing on Windows
+#apt_update_target_path = 'C:\\temp'
+#apt_package_target_path = 'C:\\temp'
+
+#INFO: Exceptions raised further below
+class ArchiveError(Exception):
+    pass
+
+class FetchDataKeyError(Exception):
+    pass
+
+class MD5Check:
- try:
- import zipfile
- except ImportError:
- log.err("Aieee!! Module not found.\n")
+    def md5_string(self, data):
+        import md5
+        hash = md5.new()
+        hash.update(data.read())
+        return hash.hexdigest()
+
+    def md5_check(self, file, checksum):
+        data = open(file, 'rb')
+        if checksum == self.md5_string(data):
+            return True
+        return False
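+
+    # Usage sketch (values are illustrative, not from this revision):
+    #   MD5Check().md5_check('/tmp/foo.deb', 'd41d8cd98f00b204e9800998ecf8427e')
+    # returns True only when the file's MD5 digest matches the given checksum.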
- try:
- os.chdir(download_dir)
- except:
- #TODO: Handle this exception
- log.err("Aieeee! I got a fatal exception that I don't understand.\nPlease debug.\n")
+class ProgressBar(object):
+ def __init__(self, minValue = 0, maxValue = 0, width = None, fd = sys.stderr):
+ #width does NOT include the two places for [] markers
+ self.min = minValue
+ self.max = maxValue
+ self.span = float(self.max - self.min)
+ self.fd = fd
+ self.signal_set = False
+ if width is None:
+ try:
+ self.handle_resize(None, None)
+ signal.signal(signal.SIGWINCH, self.handle_resize)
+ self.signal_set = True
+ except:
+ self.width = 79 #The standard
+ else:
+ self.width = width
+ self.value = self.min
+ self.items = 0 #count of items being tracked
+ self.complete = 0
+
+ def handle_resize(self, signum, frame):
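+        # The TIOCGWINSZ ioctl fills four shorts (rows, cols, xpixel, ypixel);
+        # only the first two, the terminal height and width, are used here.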
+        h, w = array('h', ioctl(self.fd, termios.TIOCGWINSZ, '\0' * 8))[:2]
+ self.width = w
- try:
- filename = zipfile.ZipFile(zip_file_name, "a")
- except IOError:
- #INFO By design zipfile throws an IOError exception when you open
- # in "append" mode and the file is not present.
- filename = zipfile.ZipFile(zip_file_name, "w")
- except:
- #TODO Handle the exception
- log.err("\nAieee! Some error exception in creating zip file %s\n" % (zip_file_name))
- sys.exit(1)
+ def updateValue(self, newValue):
+        #newValue is the increment since the last call, not an absolute value
+ self.value = max(self.min, min(self.max, self.value + newValue))
+ self.display()
- filename.write(files_to_compress, files_to_compress, zipfile.ZIP_DEFLATED)
- filename.close()
+ def completed(self):
+ self.complete = self.complete + 1
+ if self.signal_set:
+ signal.signal(signal.SIGWINCH, signal.SIG_DFL)
+ self.display()
+
+ def addItem(self, maxValue):
+ self.max = self.max + maxValue
+ self.span = float(self.max - self.min)
+ self.items = self.items + 1
+ self.display()
+
+ def display(self):
+ print "\r%3s/%3s items: %s\r" % (self.complete, self.items, str(self)),
+
+    def __str__(self):
+        #compute display fraction; guard against a zero span
+        if self.span:
+            percentFilled = (self.value - self.min) / self.span
+        else:
+            percentFilled = 0.0
+        widthFilled = int(self.width * percentFilled + 0.5)
+        return ("[" + "#"*widthFilled + " "*(self.width - widthFilled) + "]" +
+                " %5.1f%% of %d KB" % (percentFilled * 100.0, self.max/1024))
-def decompress_the_file(file, path, filename, archive_type):
- '''Extracts all the files from a single condensed archive file'''
+class Log:
+    '''An OOP implementation of logging.
+    warnings, verbose and debug map to the corresponding
+    command-line options; pass them in from optparse/getopt
+    during instantiation.'''
- if archive_type is 1:
- try:
- import bz2
- except ImportError:
- log.err("Aieeee! Module bz2 is not available.\n")
+    def __init__(self, warnings, verbose, debug):
+
+        self.WARN = warnings is True
+        self.VERBOSE = verbose is True
+        self.DEBUG = debug is True
+
+ def msg(self, msg):
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+
+ def err(self, msg):
+ sys.stderr.write(msg)
+ sys.stderr.flush()
+
+ # For the rest, we need to check the options also
+ def warn(self, msg):
+ if self.WARN is True:
+ #if options.warnings is True:
+ sys.stderr.write(msg)
+ sys.stderr.flush()
+
+ def verbose(self, msg):
+ if self.VERBOSE is True:
+ #if options.verbose is True:
+ sys.stdout.write(msg)
+ sys.stdout.flush()
- try:
- fh = bz2.BZ2File(file, 'r')
- except:
- log.err("Couldn't open file %s for reading.\n" % (file))
+ def debug(self, msg):
+ if self.DEBUG is True:
+ #if options.debug is True:
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+
+class Archiver:
+    def __init__(self, lock=None):
+        if lock is None or lock != 1:
+            self.lock = False
+        else:
+            self.ZipLock = threading.Lock()
+            self.lock = True
+
+ def TarGzipBZ2_Uncompress(self, SourceFileHandle, TargetFileHandle):
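+        #INFO: Reads the whole source file into memory in one go; fine for
+        # package-sized files, memory-heavy for very large archives.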
try:
- wr_fh = open (os.path.join(path, filename), 'wb')
- except:
- log.err("Couldn't open file %s at path %s for writing.\n" % (filename, path))
-
- try:
- wr_fh.write(fh.read())
- except EOFError, e:
- log.err("Bad file %s\n%s" % (file, e))
+ TargetFileHandle.write(SourceFileHandle.read() )
+ except EOFError:
pass
+ return True
- wr_fh.close()
- fh.close()
- log.msg("%s file synced\n" % (filename))
+ def compress_the_file(self, zip_file_name, files_to_compress):
+ '''Condenses all the files into one single file for easy transfer'''
- elif archive_type is 2:
try:
- import gzip
+ import zipfile
except ImportError:
- log.err("Aieee! Module gzip is not available.\n")
-
+ return False
+
try:
- fh = gzip.GzipFile(file, 'r')
- except:
- log.err("Couldn't open file %s for reading.\n" % (file))
+ if self.lock:
+ self.ZipLock.acquire()
- try:
- wr_fh = open(os.path.join(path,filename), 'wb')
- except:
- log.err("Couldn't open file %s at path %s for writing.\n" % (filename, path))
+ filename = zipfile.ZipFile(zip_file_name, "a")
+ except IOError:
+ #INFO: By design zipfile throws an IOError exception when you open
+ # in "append" mode and the file is not present.
+ filename = zipfile.ZipFile(zip_file_name, "w")
+ #except:
+ #TODO Handle the exception
+ #return False
+
+ filename.write(files_to_compress, files_to_compress, zipfile.ZIP_DEFLATED)
+ filename.close()
+ if self.lock:
+ self.ZipLock.release()
+ return True
- try:
- wr_fh.write(fh.read())
- except EOFError, e:
- log.err("Bad file %s\n%s" % (file, e))
- pass
+ def decompress_the_file(self, archive_file, path, target_file, archive_type):
+ '''Extracts all the files from a single condensed archive file'''
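+        # archive_type selects the codec: 1 = bz2, 2 = gzip, 3 = zip
+        # (as implemented by the branches below).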
- wr_fh.close()
- fh.close()
- log.msg("%s file synced\n" % (filename))
- elif archive_type is 3:
- try:
- zip_file = zipfile.ZipFile(file, 'rb')
- except:
- #TODO: Handle the exceptions
- log.err("\nAieee! Some error exception in reading the zip file %s\n" % (file))
- return False
+        if archive_type == 1:
+            try:
+                import bz2
+            except ImportError:
+                return False
- for filename in zip_file.namelist():
- data = zip_file.read()
+            try:
+                read_from = bz2.BZ2File(archive_file, 'r')
+            except:
+                return False
+
+            try:
+                write_to = open(os.path.join(path, target_file), 'wb')
+            except:
+                return False
+
+            if self.TarGzipBZ2_Uncompress(read_from, write_to) != True:
+                raise ArchiveError
- zip_file.close()
+ write_to.close()
+ read_from.close()
+ return True
+
+        elif archive_type == 2:
+ try:
+ import gzip
+ except ImportError:
+ return False
+
+            try:
+                read_from = gzip.GzipFile(archive_file, 'r')
+            except:
+                return False
+
+            try:
+                write_to = open(os.path.join(path, target_file), 'wb')
+            except:
+                return False
+
+            if self.TarGzipBZ2_Uncompress(read_from, write_to) != True:
+                raise ArchiveError
+
+ write_to.close()
+ read_from.close()
+ return True
+
+        elif archive_type == 3:
+            try:
+                import zipfile
+            except ImportError:
+                return False
+
+            try:
+                zip_file = zipfile.ZipFile(archive_file, 'r')
+            except:
+                return False
+
+            # FIXME: This looks odd. We read every member but never write it out.
+            for filename in zip_file.namelist():
+                data = zip_file.read(filename)
+
+            zip_file.close()
+            return True
+ else:
+ return False
+
+
+def files(root):
+ for path, folders, files in os.walk(root):
+ for file in files:
+ yield path, file
+
+def find_first_match(cache_dir=None, filename=None):
+    '''Return the full path of the filename if a match is found,
+    else return False'''
+
+ # Do the sanity check first
+ if cache_dir is None or filename is None or os.path.isdir(cache_dir) is False:
+ return False
else:
- log.err("Aieeee! %s is unknown archive.\n" % (file))
- return False
-
- return True
-
-def download_from_web(url, file, download_dir, checksum, number_of_threads, thread_name):
+ for path, file in files(cache_dir):
+ if file == filename:
+ return os.path.join(path, file)
+ return False
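+# A hypothetical call, for illustration:
+#   find_first_match('/var/cache/apt/archives', 'foo_1.0_i386.deb')
+# walks the cache directory and returns the first full path whose basename
+# matches, or False when nothing matches.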
+
+def download_from_web(url, file, download_dir, ProgressBarInstance):
'''
Download the required file from the web
The arguments are passed every time to the function so that,
@@ -123,29 +293,19 @@
size = int(headers['Content-Length'])
data = open(file,'wb')
- progbar.addItem(size)
+ ProgressBarInstance.addItem(size)
- log.msg("Downloading %s - %d KB\n" % (file, size/1024))
+
while i < size:
data.write (temp.read(block_size))
increment = min(block_size, size - i)
i += block_size
counter += 1
- progbar.updateValue(increment)
- progbar.completed()
- log.msg("\r%s %s done.\n" % (file, " "))
+ ProgressBarInstance.updateValue(increment)
+ ProgressBarInstance.completed()
data.close()
temp.close()
- #INFO: Do an md5 checksum
- if pypt_variables.options.disable_md5check == True:
- pass
- else:
- if pypt_md5_check.md5_check(file, checksum, download_dir) != True:
- os.unlink(file)
- log.err("%s checksum mismatch. File removed\n" % (file))
- return False
- log.verbose("%s successfully downloaded from %s\n\n" % (file, url))
return True
#FIXME: Find out optimal fix for this exception handling
@@ -172,52 +332,6 @@
if hasattr(e, 'code') and hasattr(e, 'reason'):
errfunc(e.code, e.reason, file)
-#TODO: walk_tree_copy_debs - DEPRECATED
-# This might require simplification and optimization.
-# But for now it's doing the job.
-# Need to find a better algorithm, maybe os.walk()
-def walk_tree_copy_debs(cache, sFile, sSourceDir):
- '''
- This function checks for a package to see if its already downloaded
- It can search directories with depths.
- '''
- #The core algorithm is here for the whole program to function'\n'
- #It recursively searches a tree/subtree of folders for package files'\n'
- #like the directory structure of "apt-proxy". If files are found (.deb || .rpm)'\n'
- #it checks wether they are on the list of packages to be fetched. If yes,'\n\
- #it copies them. Same goes for flat "apt archives folders" also.'\n'
- #Else it fetches the package from the net"""
- bFound = False
- try:
- if cache is not None:
- for name in os.listdir(cache) and bFound == True:
- #if bFound == True:
- # break
- path = os.path.join(cache, name)
- if os.path.isdir(path):
- walk_tree_copy_debs(path, sFile, sSourceDir)
- #walk_tree_copy_debs(path, sFile)
- elif name.endswith('.deb') or name.endswith('.rpm'):
- if name == sFile:
- try:
- shutil.copy(path, sSourceDir)
- except IOError, (errno, errstring):
- errfunc(errno, errstring)
- except shutil.Error:
- log.msg("%s is available in %s. Skipping Copy!\n" % (name, sSourceDir))
- bFound = True
- break
-
- #shutil.copy(path, sSourceDir)
- #bFound = True
- #break
- #return bFound
- #return False
- except OSError, (errno, strerror):
- log.err("%s %s\n" % (errno, strerror))
- errfunc(errno, strerror)
-
-
def files(root):
for path, folders, files in os.walk(root):
for file in files:
@@ -267,7 +381,7 @@
item = item.split(' ')
url = string.rstrip(string.lstrip(''.join(item[0]), chars="'"), chars="'")
file = string.rstrip(string.lstrip(''.join(item[1]), chars="'"), chars="'")
- size = string.rstrip(string.lstrip(''.join(item[2]), chars = "'"), chars="'")
+ size = int(string.rstrip(string.lstrip(''.join(item[2]), chars = "'"), chars="'"))
#INFO: md5 ends up having '\n' with it.
# That needs to be stripped too.
md5_text = string.rstrip(string.lstrip(''.join(item[3]), chars = "'"), chars = "'")
@@ -323,7 +437,7 @@
log.err("Aieee! I don't understand this errorcode\n" % (errno))
sys.exit(errno)
-def fetcher(url_file, download_dir, cache_dir, zip_bool, zip_type_file, arg_type = 0):
+def fetcher(ArgumentOptions, arg_type = None):
'''
uri - The uri data which will contain the information
path - The path (if any) where the download needs to be done
@@ -331,285 +445,290 @@
arg_type - arg_type is basically used to identify whether it's an update download or an upgrade download
'''
+ cache_dir = ArgumentOptions.cache_dir
+ zip_bool = ArgumentOptions.zip_it
+
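+    #INFO: FetcherClass mixes ProgressBar, Archiver and MD5Check together so
+    # that one instance carries the progress state, the zip lock and the
+    # checksum helper across all worker threads.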
+ class FetcherClass(ProgressBar, Archiver, MD5Check):
+ def __init__(self, width, lock):
+ ProgressBar.__init__(self, width=width)
+ #ProgressBar.__init__(self, width)
+ #self.width = width
+ Archiver.__init__(self, lock=lock)
+ #self.lock = lock
+
+ #global FetcherInstance
+ FetcherInstance = FetcherClass(width=30, lock=True)
#INFO: For the Progress Bar
- global progbar
- progbar = pypt_progressbar.ProgressBar(width = 30)
+ #progbar = ProgressBar(width = 30)
- if arg_type == 1:
- #INFO: Oh! We're only downloading the update package list database
- # Package Update database changes almost daily in Debian.
- # This is at least true for Sid. Hence it doesn't make sense to copy
- # update packages' database from a cache.
-
- if download_dir is None:
- if os.access("pypt-downloads", os.W_OK) is True:
- download_dir = os.path.abspath("pypt-downloads")
- else:
- try:
- os.umask(0002)
- os.mkdir("pypt-downloads")
- download_dir = os.path.abspath("pypt-downloads")
- except:
- log.err("Aieeee! I couldn't create a directory")
- errfunc(1, '')
+ if ArgumentOptions.download_dir is None:
+ if os.access("pypt-downloads", os.W_OK) is True:
+ download_path = os.path.abspath("pypt-downloads")
else:
- download_dir = os.path.abspath(download_dir)
+ try:
+ os.umask(0002)
+ os.mkdir("pypt-downloads")
+ download_path = os.path.abspath("pypt-downloads")
+ except:
+ log.err("Aieeee! I couldn't create a directory")
+ errfunc(1, '')
+ else:
+ download_path = os.path.abspath(ArgumentOptions.download_dir)
+
+ FetchData = {}
+ if ArgumentOptions.fetch_update:
+ try:
+ raw_data_list = open(ArgumentOptions.fetch_update, 'r').readlines()
+ except IOError, (errno, strerror):
+ log.err("%s %s\n" % (errno, strerror))
+ errfunc(errno, '')
- if os.access(os.path.join(download_dir, zip_type_file), os.F_OK):
- log.err("%s already present.\nRemove it first.\n" % (zip_type_file))
+ FetchData['Update'] = []
+ for item in raw_data_list:
+ FetchData['Update'].append(item)
+
+ if os.access(os.path.join(download_path, ArgumentOptions.zip_update_file), os.F_OK):
+ log.err("%s already present.\nRemove it first.\n" % (ArgumentOptions.zip_update_file) )
sys.exit(1)
+
+ if ArgumentOptions.fetch_upgrade:
try:
- raw_data_list = open(url_file, 'r').readlines()
+ raw_data_list = open(ArgumentOptions.fetch_upgrade, 'r').readlines()
except IOError, (errno, strerror):
log.err("%s %s\n" % (errno, strerror))
errfunc(errno, '')
+
+ FetchData['Upgrade'] = []
+ for item in raw_data_list:
+ FetchData['Upgrade'].append(item)
- #INFO: Mac OS is having issues with Python Threading.
- # Use the conventional model for Mac OS
- if sys.platform == 'darwin':
- log.verbose("Running on Mac OS. Python doesn't have proper support for Threads on Mac OS X.\n")
- log.verbose("Running in the conventional non-threaded way.\n")
- for each_single_item in raw_data_list:
- (url, file, download_size, checksum) = stripper(each_single_item)
- if download_from_web(url, file, download_dir, None) != True:
- pypt_variables.errlist.append(file)
- else:
- if zip_bool:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file)) # Remove it because we don't need the file once it is zipped.
- else:
- #INFO: Thread Support
- if pypt_variables.options.num_of_threads > 1:
- log.msg("WARNING: Threads is still in alpha stage. It's better to use just a single thread at the moment.\n\n")
- log.warn("Threads is still in alpha stage. It's better to use just a single thread at the moment.\n\n")
-
- NUMTHREADS = pypt_variables.options.num_of_threads
- ziplock = threading.Lock()
+ if os.access(os.path.join(download_path, ArgumentOptions.zip_upgrade_file), os.F_OK):
+ log.err("%s already present.\nRemove it first.\n" % (ArgumentOptions.zip_upgrade_file) )
+ sys.exit(1)
- def run(request, response, func=download_from_web):
- '''Get items from the request Queue, process them
- with func(), put the results along with the
- Thread's name into the response Queue.
+ del raw_data_list
+
+
+ #INFO: Mac OS is having issues with Python Threading.
+ # Use the conventional model for Mac OS
+ if sys.platform == 'darwin':
+ log.verbose("Running on Mac OS! pypt-offline doesn't have proper support for Threads on Mac OS X.\n")
+ log.verbose("Running in the conventional non-threaded way.\n")
+
+ for key in FetchData.keys():
+ for item in FetchData.get(key):
- Stop running once an item is None.'''
-
- while 1:
- item = request.get()
- if item is None:
- break
- (url, file, download_size, checksum) = stripper(item)
- thread_name = threading.currentThread().getName()
- response.put((thread_name, url, file, func(url, file, download_dir, None, NUMTHREADS, thread_name)))
-
- # This will take care of making sure that if downloaded, they are zipped
- (thread_name, url, file, exit_status) = responseQueue.get()
- if exit_status == True:
- if zip_bool:
- ziplock.acquire()
- try:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file)) # Remove it because we don't need the file once it is zipped.
- finally:
- ziplock.release()
- else:
- pypt_variables.errlist.append(file)
- #pass
-
- # Create two Queues for the requests and responses
- requestQueue = Queue.Queue()
- responseQueue = Queue.Queue()
-
- # Pool of NUMTHREADS Threads that run run().
- thread_pool = [
- threading.Thread(
- target=run,
- args=(requestQueue, responseQueue)
- )
- for i in range(NUMTHREADS)
- ]
-
- # Start the threads.
- for t in thread_pool: t.start()
-
- # Queue up the requests.
- for item in raw_data_list: requestQueue.put(item)
-
- # Shut down the threads after all requests end.
- # (Put one None "sentinel" for each thread.)
- for t in thread_pool: requestQueue.put(None)
-
- # Don't end the program prematurely.
- #
- # (Note that because Queue.get() is blocking by
- # defualt this isn't strictly necessary. But if
- # you were, say, handling responses in another
- # thread, you'd want something like this in your
- # main thread.)
- for t in thread_pool: t.join()
-
- if arg_type == 2:
- if download_dir is None:
- if os.access("pypt-downloads", os.W_OK) is True:
- download_dir = os.path.abspath("pypt-downloads")
- else:
- try:
- os.umask(0002)
- os.mkdir("pypt-downloads")
- download_dir = os.path.abspath("pypt-downloads")
- except:
- log.err("Aieeee! I couldn't create a directory")
- else:
- download_dir = os.path.abspath(download_dir)
-
- if os.access(os.path.join(download_dir, zip_type_file), os.F_OK):
- log.err("%s already present.\nRemove it first.\n" % (zip_type_file))
- sys.exit(1)
-
- if cache_dir is not None:
- cache_dir = os.path.abspath(cache_dir)
-
- try:
- raw_data_list = open(url_file, 'r').readlines()
- except IOError, (errno, strerror):
- log.err("%s %s\n" %(errno, strerror))
- errfunc(errno, '', url_file)
-
- #INFO: Mac OS X in mis-behaving with Python Threading
- # Use the conventional model for Mac OS X
- if sys.platform == 'darwin':
- log.verbose("Running on Mac OS. Python doesn't have proper support for Threads on Mac OS X.\n")
- log.verbose("Running in the conventional non-threaded way.\n")
- for each_single_item in raw_data_list:
(url, file, download_size, checksum) = stripper(item)
+ log.msg("Downloading %s\n" % (file) )
- if cache_dir is None:
- if download_from_web(url, file, download_dir, checksum) != True:
- pypt_variables.errlist.append(file)
+ if key == 'Update':
+ if download_from_web(url, file, download_path, FetcherInstance) != True:
+ errlist.append(file)
+ else:
+ log.msg("\r%s %s done.\n" % (file, " ") )
if zip_bool:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file))
- else:
- if copy_first_match(cache_dir, file, download_dir, checksum) == False:
- if download_from_web(url, file, download_dir, checksum) != True:
- pypt_variables.errlist.append(file)
- else:
- if os.access(os.path.join(cache_dir, file), os.F_OK):
- log.debug("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir)) #INFO: The file is already there.
- log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir))
+                        if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
+                            log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
+                            sys.exit(1)
+                        log.verbose("%s added to archive %s.\n" % (file, ArgumentOptions.zip_update_file) )
+                        os.unlink(os.path.join(download_path, file) ) # Remove it because we don't need the file once it is zipped.
+
+            elif key == 'Upgrade':
+                if cache_dir is None:
+                    log.msg("Downloading %s - %d KB\n" % (file, download_size/1024))
+                    if download_from_web(url, file, download_path, FetcherInstance) != True:
+                        errlist.append(file)
+                    if zip_bool:
+                        log.msg("\r%s %s done.\n" % (file, " "))
+                        FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+                        os.unlink(os.path.join(download_path, file))
+ else:
+ if copy_first_match(cache_dir, file, download_path, checksum) == False:
+ log.msg("Downloading %s - %d KB\n" % (file, size/1024))
+ if download_from_web(url, file, download_path, FetcherInstance) != True:
+ errlist.append(file)
else:
- if os.access(cache_dir, os.W_OK):
- shutil.copy(file, cache_dir)
- log.verbose("%s copied to %s\n" % (file, cache_dir))
+ log.msg("\r%s %s done.\n" % (file, " "))
+ if os.access(os.path.join(cache_dir, file), os.F_OK):
+ log.debug("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir)) #INFO: The file is already there.
+ log.verbose("%s file is already present in cache-dir %s. Skipping copy.\n" % (file, cache_dir))
else:
- log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
-
+ if os.access(cache_dir, os.W_OK):
+ shutil.copy(file, cache_dir)
+ log.verbose("%s copied to %s\n" % (file, cache_dir))
+ else:
+ log.verbose("Cannot copy %s to %s. Is %s writeable??\n" % (file, cache_dir))
+
+ if zip_bool:
+                            FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+ os.unlink(os.path.join(download_path, file))
+ elif True:
if zip_bool:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file))
- elif True:
- if zip_bool:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file))
- else:
- #INFO: Thread Support
- if pypt_variables.options.num_of_threads > 1:
- log.msg("WARNING: Threads is still in alpha stage. It's better to use just a single thread at the moment.\n")
- log.warn("Threads is still in alpha stage. It's better to use just a single thread at the moment.\n")
-
- NUMTHREADS = pypt_variables.options.num_of_threads
- ziplock = threading.Lock()
+                        FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file)
+ os.unlink(os.path.join(download_path, file))
+
+ else:
+ raise FetchDataKeyError
+
+ else:
+ #INFO: Thread Support
+ if ArgumentOptions.num_of_threads > 1:
+ log.msg("WARNING: Threads is still in beta stage. It's better to use just a single thread at the moment.\n\n")
+ log.warn("Threads is still in beta stage. It's better to use just a single thread at the moment.\n\n")
- def run(request, response, func=copy_first_match):
- '''Get items from the request Queue, process them
- with func(), put the results along with the
- Thread's name into the response Queue.
+ def run(request, response, func=find_first_match):
+ '''Get items from the request Queue, process them
+ with func(), put the results along with the
+ Thread's name into the response Queue.
+
+ Stop running when item is None.'''
+
+ while 1:
+ tuple_item_key = request.get()
+ if tuple_item_key is None:
+ break
+ (key, item) = tuple_item_key
+ (url, file, download_size, checksum) = stripper(item)
+ thread_name = threading.currentThread().getName()
- Stop running once an item is None.'''
-
- while 1:
- item = request.get()
- if item is None:
- break
- (url, file, download_size, checksum) = stripper(item)
- thread_name = threading.currentThread().getName()
- response.put((thread_name, url, file, func(cache_dir, file, download_dir, checksum)))
+ if key == 'Update':
- # This will take care of making sure that if downloaded, they are zipped
- (thread_name, url, file, exit_status) = responseQueue.get()
- if exit_status == True:
- log.msg("%s copied from cache.\n" % (file))
- log.verbose("%s copied from cache-dir %s.\n" % (file, cache_dir))
- log.debug("%s copied from cache-dir %s.\n" % (file, cache_dir))
- else:
- log.debug("%s not available in local cache %s\n" % (file, cache_dir))
- log.verbose("%s not available in local cache %s\n" % (file, cache_dir))
- exit_status = download_from_web(url, file, download_dir, checksum, NUMTHREADS, thread_name)
+ #INFO: We pass None as a filename here because we don't want to do a tree search of
+ # update files. Update files are changed daily and there is no point in doing a search of
+ # them in the cache_dir
+ response.put(func(cache_dir, None) )
+
+ #INFO: exit_status here would be False because for updates there's no need to do a
+ # find_first_match
+ # This is more with the above statement where None is passed as the filename
+ exit_status = response.get()
+
+ if exit_status == False:
+ log.msg("Downloading %s\n" % (file) )
- if exit_status:
-
- #INFO: copy to cache-dir for further use
- # Here we try copying the downloaded file to the cache-dir
- # so that if the same file is asked for again, it can be copied from the local storage device
- if cache_dir is None:
- log.debug("No cache-dir specified. Skipping copy.\n")
- elif os.access(os.path.join(cache_dir, file), os.F_OK):
- log.debug("%s is already present in %s.\n" % (file, cache_dir))
+ if download_from_web(url, file, download_path, FetcherInstance) == True:
+ log.msg("%s done.\n" % (file) )
+ if zip_bool:
+ if FetcherInstance.compress_the_file(ArgumentOptions.zip_update_file, file) != True:
+ log.err("Couldn't archive %s to file %s.\n" % (file, ArgumentOptions.zip_update_file) )
+ sys.exit(1)
+ os.unlink(os.path.join(download_path, file) )
+
+ elif key == 'Upgrade':
+ response.put(func(cache_dir, file) )
+                    #INFO: find_first_match() returns either False or a file name with absolute path
+ full_file_path = response.get()
+
+ if full_file_path != False:
+ if ArgumentOptions.disable_md5check is False:
+ if FetcherInstance.md5_check(full_file_path, checksum) is True:
+ if zip_bool:
+ if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, full_file_path) is True:
+ log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+ else:
+ try:
+ shutil.copy(full_file_path, download_path)
+ log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+ except shutil.Error:
+ log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, download_path) )
+
+ else:
+ log.verbose("%s MD5 checksum mismatch. Skipping file.\n" % (file) )
+ log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
+ if download_from_web(url, file, download_path, FetcherInstance) == True:
+ log.msg("%s done.\n" % (file) )
+ if ArgumentOptions.cache_dir:
+ try:
+ shutil.copy(file, cache_dir)
+ log.verbose("%s copied to local cache directory %s\n" % (file, ArgumentOptions.cache_dir) )
+ except shutil.Error:
+ log.verbose("Couldn't copy %s to %s\n\n" % (file, ArgumentOptions.cache_dir) )
+ if ArgumentOptions.zip_it:
+ if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+ log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+ sys.exit(1)
+ os.unlink(os.path.join(download_path, file) )
+
else:
- if os.access(cache_dir, os.W_OK):
- shutil.copy(file, cache_dir)
- log.debug("%s copied to local cache-dir %s.\n" % (file, cache_dir))
- log.verbose("%s copied to local cache-dir %s.\n" % (file, cache_dir))
-
- if zip_bool:
- ziplock.acquire()
+ #INFO: If md5check is disabled, just copy it.
try:
- compress_the_file(zip_type_file, file, download_dir)
- os.unlink(os.path.join(download_dir, file)) # Remove it because we don't need the file once it is zipped.
- finally:
- ziplock.release()
+ shutil.copy(full_file_path, download_path)
+ log.msg("%s copied from local cache directory %s\n" % (file, cache_dir) )
+ except shutil.Error:
+ log.verbose("%s already available in dest_dir. Skipping copy!!!\n\n" % (file) )
else:
- pypt_variables.errlist.append(file)
+ log.verbose("%s not available in local cache %s.\n" % (file, ArgumentOptions.cache_dir) )
+ log.msg("Downloading %s - %d KB\n" % (file, download_size/1024) )
+ if download_from_web(url, file, download_path, FetcherInstance) == True:
+ if ArgumentOptions.disable_md5check is False:
+                            if FetcherInstance.md5_check(file, checksum) is True:
+
+ if ArgumentOptions.cache_dir:
+ try:
+ shutil.copy(file, ArgumentOptions.cache_dir)
+ log.verbose("%s copied to local cache directory %s\n" % (file, ArgumentOptions.cache_dir) )
+ except shutil.Error:
+ log.verbose("%s already available in %s. Skipping copy!!!\n\n" % (file, ArgumentOptions.cache_dir) )
+
+ if zip_bool:
+ if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+ log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+ sys.exit(1)
+ log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+ os.unlink(os.path.join(download_path, file) )
+ if zip_bool:
+ if FetcherInstance.compress_the_file(ArgumentOptions.zip_upgrade_file, file) != True:
+ log.err("Couldn't archive %s to file %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+ sys.exit(1)
+ log.verbose("%s added to archive %s\n" % (file, ArgumentOptions.zip_upgrade_file) )
+ os.unlink(os.path.join(download_path, file) )
+ log.msg("%s done.\n" % (file) )
+ else:
+ log.err("Couldn't find %s\n" % (file) )
+ errlist.append(file)
+
+ # Create two Queues for the requests and responses
+ requestQueue = Queue.Queue()
+ responseQueue = Queue.Queue()
+
+ # Pool of NUMTHREADS Threads that run run().
+ thread_pool = [
+ threading.Thread(
+ target=run,
+ args=(requestQueue, responseQueue)
+ )
+ for i in range(ArgumentOptions.num_of_threads)
+ ]
+
+ # Start the threads.
+ for t in thread_pool: t.start()
+
+ # Queue up the requests.
+ #for item in raw_data_list: requestQueue.put(item)
+ for key in FetchData.keys():
+ for item in FetchData.get(key):
+ requestQueue.put( (key, item) )
+
+ # Shut down the threads after all requests end.
+ # (Put one None "sentinel" for each thread.)
+ for t in thread_pool: requestQueue.put(None)
+
+ # Don't end the program prematurely.
+ #
+ # (Note that because Queue.get() is blocking by
+    # default this isn't strictly necessary. But if
+ # you were, say, handling responses in another
+ # thread, you'd want something like this in your
+ # main thread.)
+ for t in thread_pool: t.join()
- # Create two Queues for the requests and responses
- requestQueue = Queue.Queue()
- responseQueue = Queue.Queue()
-
-
- # Pool of NUMTHREADS Threads that run run().
- thread_pool = [
- threading.Thread(
- target=run,
- args=(requestQueue, responseQueue)
- )
- for i in range(NUMTHREADS)
- ]
-
- # Start the threads.
- for t in thread_pool: t.start()
-
- # Queue up the requests.
- for item in raw_data_list: requestQueue.put(item)
-
- # Shut down the threads after all requests end.
- # (Put one None "sentinel" for each thread.)
- for t in thread_pool: requestQueue.put(None)
-
- # Don't end the program prematurely.
- #
- # (Note that because Queue.get() is blocking by
- # default this isn't strictly necessary. But if
- # you were, say, handling responses in another
- # thread, you'd want something like this in your
- # main thread.)
- for t in thread_pool: t.join()
-
# Print the failed files
- if len(pypt_variables.errlist) == 0:
+ if len(errlist) == 0:
pass # Don't print if nothing failed.
else:
log.err("\n\nThe following files failed to be downloaded.\n")
- for error in pypt_variables.errlist:
+ for error in errlist:
log.err("%s failed.\n" % (error))
def syncer(install_file_path, target_path, arg_type=None):
@@ -617,6 +736,7 @@
It syncs "install_file_path" which could be a valid file path
or a zip archive to "target_path'''
+ archive = Archiver()
if arg_type == 1:
try:
import zipfile
@@ -650,9 +770,9 @@
#data = file.read(filename)
if pypt_magic.file(os.path.abspath(filename)) == "application/x-bzip2":
- decompress_the_file(os.path.abspath(filename), target_path, filename, 1)
+ archive.decompress_the_file(os.path.abspath(filename), target_path, filename, 1)
elif pypt_magic.file(os.path.abspath(filename)) == "application/x-gzip":
- decompress_the_file(os.path.abspath(filename), target_path, filename, 2)
+ archive.decompress_the_file(os.path.abspath(filename), target_path, filename, 2)
elif pypt_magic.file(filename) == "PGP armored data" or pypt_magic.file(filename) == "application/x-dpkg":
if os.access(target_path, os.W_OK):
shutil.copy(filename, target_path)
@@ -670,11 +790,11 @@
log.err("Aieeee! module not found.\n")
if pypt_magic.file(os.path.join(install_file_path, eachfile)) == "application/x-bzip2":
- decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 1)
+ archive.decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 1)
elif pypt_magic.file(os.path.join(install_file_path, eachfile)) == "application/gzip":
- decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 2)
+ archive.decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 2)
elif pypt_magic.file(os.path.join(install_file_path, eachfile)) == "application/zip":
- decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 3)
+ archive.decompress_the_file(os.path.join(install_file_path, eachfile), target_path, eachfile, 3)
elif pypt_magic.file(os.path.join(install_file_path, eachfile)) == "PGP armored data" or pypt_magic.file(filename) == "application/x-dpkg":
if os.access(target_path, os.W_OK):
shutil.copy(os.path.join(install_file_path, eachfile), target_path)
@@ -686,134 +806,206 @@
'''Here we basically do the sanity checks, some validations
and then accordingly call the corresponding functions.'''
+ """Contains most of the variables that are required by the application to run.
+ Also does command-line option parsing and variable validation."""
+
+ parser = optparse.OptionParser(usage="%prog [OPTION1, OPTION2, ...]",
+ version="%prog " + version + "\n" + copyright)
+
+ parser.add_option("-d","--download-dir", dest="download_dir",
+ help="Root directory path to save the downloaded files", action="store", type="string", metavar="pypt-downloads")
+ parser.add_option("-s","--cache-dir", dest="cache_dir",
+ help="Root directory path where the pre-downloaded files will be searched. If not, give a period '.'",
+ action="store", type="string", metavar=".")
+ parser.add_option("--verbose", dest="verbose", help="Enable verbose messages", action="store_true")
+ parser.add_option("--warnings", dest="warnings", help="Enable warnings", action="store_true")
+ parser.add_option("--debug", dest="debug", help="Enable Debug mode", action="store_true")
+ parser.add_option("-u","--uris", dest="uris_file",
+ help="Full path of the uris file which contains the main database of files to be downloaded",action="store", type="string")
+ parser.add_option("","--disable-md5check", dest="disable_md5check",
+ help="Disable md5checksum validation on downloaded files",action="store_true")
+ parser.add_option("", "--threads", dest="num_of_threads", help="Number of threads to spawn",
+ action="store", type="int", metavar="1", default=1)
+
+ #INFO: Option zip is not enabled by default but is highly encouraged.
+ parser.add_option("-z","--zip", dest="zip_it", help="Zip the downloaded files to a single zip file", action="store_true")
+ parser.add_option("--zip-update-file", dest="zip_update_file", help="Default zip file for downloaded (update) data",
+ action="store", type="string", metavar="pypt-offline-update.zip", default="pypt-offline-update.zip")
+ parser.add_option("--zip-upgrade-file", dest="zip_upgrade_file", help="Default zip file for downloaded (upgrade) data",
+ action="store", type="string", metavar="pypt-offline-upgrade.zip", default="pypt-offline-upgrade.zip")
+
+ #INFO: At the moment nargs cannot be set to something like * so that optparse could manipulate n number of args. This is a limitation in optparse at the moment. The author might add this feature in the future.
+ # When fixed by the author, we'd be in a better shape to use the above mentioned line instead of relying on this improper way.
+ # With action="store_true", we are able to store all the arguments into the args variable from where it can be fetched later.
+ #parser.add_option("", "--set-install-packages", dest="set_install_packages", help="Extract the list of uris which need to be fetched for installation of the given package and its dependencies", action="store", type="string", nargs=10, metavar="package_name")
+ parser.add_option("", "--set-install", dest="set_install",
+ help="Extract the list of uris which need to be fetched for installation of the given package and its dependencies",
+ action="store", metavar="pypt-offline-install.dat")
+ parser.add_option("", "--set-install-packages", dest="set_install_packages", help="Name of the packages which need to be fetched",
+ action="store_true", metavar="package_names")
+
+ parser.add_option("", "--set-update", dest="set_update", help="Extract the list of uris which need to be fetched for updation",
+ action="store", type="string", metavar="pypt-offline-update.dat")
+ parser.add_option("", "--fetch-update", dest="fetch_update",
+ help="Fetch the list of uris which are needed for apt's databases _updation_. This command must be executed on the WITHNET machine",
+ action="store", type="string", metavar="pypt-offline-update.dat")
+ parser.add_option("", "--install-update", dest="install_update",
+ help="Install the fetched database files to the NONET machine and _update_ the apt database on the NONET machine. This command must be executed on the NONET machine",
+ action="store", type="string", metavar="pypt-offline-update.zip")
+ parser.add_option("", "--set-upgrade", dest="set_upgrade", help="Extract the list of uris which need to be fetched for _upgradation_",
+ action="store", type="string", metavar="pypt-offline-upgrade.dat")
+ parser.add_option("", "--upgrade-type", dest="upgrade_type",
+ help="Type of upgrade to do. Use one of upgrade, dist-upgrade, dselect-ugprade",
+ action="store", type="string", metavar="upgrade")
+ parser.add_option("", "--fetch-upgrade", dest="fetch_upgrade",
+ help="Fetch the list of uris which are needed for apt's databases _upgradation_. This command must be executed on the WITHNET machine",
+ action="store", type="string", metavar="pypt-offline-upgrade.dat")
+ parser.add_option("", "--install-upgrade", dest="install_upgrade",
+ help="Install the fetched packages to the NONET machine and _upgrade_ the packages on the NONET machine. This command must be executed on the NONET machine",
+ action="store", type="string", metavar="pypt-offline-upgrade.zip")
+ #global options, args
+ (options, args) = parser.parse_args()
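+
+    #INFO: A typical round-trip implied by the help strings above (a sketch;
+    # the exact flag combinations are an assumption, not taken from this code):
+    #   NONET machine:   pypt-offline --set-update pypt-offline-update.dat
+    #   WITHNET machine: pypt-offline --fetch-update pypt-offline-update.dat -d pypt-downloads --zip
+    #   NONET machine:   pypt-offline --install-update pypt-offline-update.zip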
+
try:
# The log implementation
# Instantiate the class
global log
- log = pypt_logger.log(pypt_variables.options.warnings, pypt_variables.options.verbose, pypt_variables.options.debug)
+ log = Log(options.warnings, options.verbose, options.debug)
- log.msg("pypt-offline %s\n" % (pypt_variables.version))
- log.msg("Copyright %s\n" % (pypt_variables.copyright))
+ log.msg("pypt-offline %s\n" % (version))
+ log.msg("Copyright %s\n" % (copyright))
- if pypt_variables.options.set_update:
- if platform.system() in pypt_variables.supported_platforms:
+ if options.set_update:
+ if platform.system() in supported_platforms:
if os.geteuid() != 0:
- pypt_variables.parser.error("This option requires super-user privileges. Execute as root or use sudo/su")
+ parser.error("This option requires super-user privileges. Execute as root or use sudo/su")
else:
log.msg("Generating database of files that are needed for an update.\n")
- os.environ['__pypt_set_update'] = pypt_variables.options.set_update
+ os.environ['__pypt_set_update'] = options.set_update
if os.system('/usr/bin/apt-get -qq --print-uris update > $__pypt_set_update') != 0:
log.err("FATAL: Something is wrong with the apt system.\n")
else:
- pypt_variables.parser.error("This argument is supported only on Unix like systems with apt installed\n")
+ parser.error("This argument is supported only on Unix like systems with apt installed\n")
sys.exit(0)
- if pypt_variables.options.set_upgrade or pypt_variables.options.upgrade_type:
- if not (pypt_variables.options.set_upgrade and pypt_variables.options.upgrade_type):
- pypt_variables.parser.error("Options --set-upgrade and --upgrade-type are mutually inclusive\n")
+ if options.set_upgrade or options.upgrade_type:
+ if not (options.set_upgrade and options.upgrade_type):
+ parser.error("Options --set-upgrade and --upgrade-type are mutually inclusive\n")
- if platform.system() in pypt_variables.supported_platforms:
+ if platform.system() in supported_platforms:
if os.geteuid() != 0:
- pypt_variables.parser.error("This option requires super-user privileges. Execute as root or use sudo/su")
+ parser.error("This option requires super-user privileges. Execute as root or use sudo/su")
#TODO: Use a more Pythonic way for it
- if pypt_variables.options.upgrade_type == "upgrade":
+ if options.upgrade_type == "upgrade":
log.msg("Generating database of files that are needed for an upgrade.\n")
- os.environ['__pypt_set_upgrade'] = pypt_variables.options.set_upgrade
+ os.environ['__pypt_set_upgrade'] = options.set_upgrade
if os.system('/usr/bin/apt-get -qq --print-uris upgrade > $__pypt_set_upgrade') != 0:
log.err("FATAL: Something is wrong with the apt system.\n")
- elif pypt_variables.options.upgrade_type == "dist-upgrade":
+ elif options.upgrade_type == "dist-upgrade":
log.msg("Generating database of files that are needed for a dist-upgrade.\n")
- os.environ['__pypt_set_upgrade'] = pypt_variables.options.set_upgrade
+ os.environ['__pypt_set_upgrade'] = options.set_upgrade
if os.system('/usr/bin/apt-get -qq --print-uris dist-upgrade > $__pypt_set_upgrade') != 0:
log.err("FATAL: Something is wrong with the apt system.\n")
- elif pypt_variables.options.upgrade_type == "dselect-upgrade":
+ elif options.upgrade_type == "dselect-upgrade":
log.msg("Generating database of files that are needed for a dselect-upgrade.\n")
- os.environ['__pypt_set_upgrade'] = pypt_variables.options.set_upgrade
+ os.environ['__pypt_set_upgrade'] = options.set_upgrade
if os.system('/usr/bin/apt-get -qq --print-uris dselect-upgrade > $__pypt_set_upgrade') != 0:
log.err("FATAL: Something is wrong with the apt system.\n")
else:
- pypt_variables.parser.error("Invalid upgrade argument type selected\nPlease use one of, upgrade/dist-upgrade/dselect-upgrade\n")
+ parser.error("Invalid upgrade argument type selected\nPlease use one of, upgrade/dist-upgrade/dselect-upgrade\n")
...
[truncated message content] |