[cedar-backup-svn] SF.net SVN: cedar-backup:[1096] cedar-backup2/trunk
Brought to you by:
pronovic
|
From: <pro...@us...> - 2015-01-05 20:24:34
|
Revision: 1096
http://sourceforge.net/p/cedar-backup/code/1096
Author: pronovic
Date: 2015-01-05 20:24:25 +0000 (Mon, 05 Jan 2015)
Log Message:
-----------
Add optional size-limit configuration for amazons3 extension.
Modified Paths:
--------------
cedar-backup2/trunk/CREDITS
cedar-backup2/trunk/CedarBackup2/extend/amazons3.py
cedar-backup2/trunk/CedarBackup2/xmlutil.py
cedar-backup2/trunk/Changelog
cedar-backup2/trunk/manual/src/extensions.xml
cedar-backup2/trunk/testcase/amazons3tests.py
cedar-backup2/trunk/testcase/data/amazons3.conf.2
Modified: cedar-backup2/trunk/CREDITS
===================================================================
--- cedar-backup2/trunk/CREDITS 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/CREDITS 2015-01-05 20:24:25 UTC (rev 1096)
@@ -23,7 +23,7 @@
software, as indicated in the source code itself.
Unless otherwise indicated, all Cedar Backup source code is Copyright
-(c) 2004-2011,2013,2014 Kenneth J. Pronovici and is released under the GNU
+(c) 2004-2011,2013-2015 Kenneth J. Pronovici and is released under the GNU
General Public License, version 2. The contents of the GNU General Public
License can be found in the LICENSE file, or can be downloaded from
http://www.gnu.org/.
Modified: cedar-backup2/trunk/CedarBackup2/extend/amazons3.py
===================================================================
--- cedar-backup2/trunk/CedarBackup2/extend/amazons3.py 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/CedarBackup2/extend/amazons3.py 2015-01-05 20:24:25 UTC (rev 1096)
@@ -8,7 +8,7 @@
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
-# Copyright (c) 2014 Kenneth J. Pronovici.
+# Copyright (c) 2014-2015 Kenneth J. Pronovici.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or
@@ -94,10 +94,10 @@
import shutil
# Cedar Backup modules
-from CedarBackup2.filesystem import FilesystemList
-from CedarBackup2.util import resolveCommand, executeCommand, isRunningAsRoot, changeOwnership
-from CedarBackup2.xmlutil import createInputDom, addContainerNode, addBooleanNode, addStringNode
-from CedarBackup2.xmlutil import readFirstChild, readString, readBoolean
+from CedarBackup2.filesystem import FilesystemList, BackupFileList
+from CedarBackup2.util import resolveCommand, executeCommand, isRunningAsRoot, changeOwnership, isStartOfWeek
+from CedarBackup2.xmlutil import createInputDom, addContainerNode, addBooleanNode, addStringNode, addLongNode
+from CedarBackup2.xmlutil import readFirstChild, readString, readBoolean, readLong
from CedarBackup2.actions.util import writeIndicatorFile
from CedarBackup2.actions.constants import DIR_TIME_FORMAT, STAGE_INDICATOR
@@ -130,32 +130,42 @@
- The s3Bucket value must be a non-empty string
- The encryptCommand value, if set, must be a non-empty string
+ - The full backup size limit, if set, must be a number of bytes >= 0
+ - The incremental backup size limit, if set, must be a number of bytes >= 0
@sort: __init__, __repr__, __str__, __cmp__, warnMidnite, s3Bucket
"""
- def __init__(self, warnMidnite=None, s3Bucket=None, encryptCommand=None):
+ def __init__(self, warnMidnite=None, s3Bucket=None, encryptCommand=None,
+ fullBackupSizeLimit=None, incrementalBackupSizeLimit=None):
"""
Constructor for the C{AmazonS3Config} class.
@param warnMidnite: Whether to generate warnings for crossing midnite.
@param s3Bucket: Name of the Amazon S3 bucket in which to store the data
@param encryptCommand: Command used to encrypt backup data before upload to S3
+ @param fullBackupSizeLimit: Maximum size of a full backup, in bytes
+ @param incrementalBackupSizeLimit: Maximum size of an incremental backup, in bytes
@raise ValueError: If one of the values is invalid.
"""
self._warnMidnite = None
self._s3Bucket = None
self._encryptCommand = None
+ self._fullBackupSizeLimit = None
+ self._incrementalBackupSizeLimit = None
self.warnMidnite = warnMidnite
self.s3Bucket = s3Bucket
self.encryptCommand = encryptCommand
+ self.fullBackupSizeLimit = fullBackupSizeLimit
+ self.incrementalBackupSizeLimit = incrementalBackupSizeLimit
def __repr__(self):
"""
Official string representation for class instance.
"""
- return "AmazonS3Config(%s, %s, %s)" % (self.warnMidnite, self.s3Bucket, self.encryptCommand)
+ return "AmazonS3Config(%s, %s, %s, %s, %s)" % (self.warnMidnite, self.s3Bucket, self.encryptCommand,
+ self.fullBackupSizeLimit, self.incrementalBackupSizeLimit)
def __str__(self):
"""
@@ -186,6 +196,16 @@
return -1
else:
return 1
+ if self.fullBackupSizeLimit != other.fullBackupSizeLimit:
+ if self.fullBackupSizeLimit < other.fullBackupSizeLimit:
+ return -1
+ else:
+ return 1
+ if self.incrementalBackupSizeLimit != other.incrementalBackupSizeLimit:
+ if self.incrementalBackupSizeLimit < other.incrementalBackupSizeLimit:
+ return -1
+ else:
+ return 1
return 0
def _setWarnMidnite(self, value):
@@ -234,9 +254,59 @@
"""
return self._encryptCommand
+ def _setFullBackupSizeLimit(self, value):
+ """
+ Property target used to set the full backup size limit.
+ The value must be an integer >= 0.
+ @raise ValueError: If the value is not valid.
+ """
+ if value is None:
+ self._fullBackupSizeLimit = None
+ else:
+ try:
+ value = int(value)
+ except TypeError:
+ raise ValueError("Full backup size limit must be an integer >= 0.")
+ if value < 0:
+ raise ValueError("Full backup size limit must be an integer >= 0.")
+ self._fullBackupSizeLimit = value
+
+ def _getFullBackupSizeLimit(self):
+ """
+ Property target used to get the full backup size limit.
+ """
+ return self._fullBackupSizeLimit
+
+ def _setIncrementalBackupSizeLimit(self, value):
+ """
+ Property target used to set the incremental backup size limit.
+ The value must be an integer >= 0.
+ @raise ValueError: If the value is not valid.
+ """
+ if value is None:
+ self._incrementalBackupSizeLimit = None
+ else:
+ try:
+ value = int(value)
+ except TypeError:
+ raise ValueError("Incremental backup size limit must be an integer >= 0.")
+ if value < 0:
+ raise ValueError("Incremental backup size limit must be an integer >= 0.")
+ self._incrementalBackupSizeLimit = value
+
+ def _getIncrementalBackupSizeLimit(self):
+ """
+ Property target used to get the incremental backup size limit.
+ """
+ return self._incrementalBackupSizeLimit
+
warnMidnite = property(_getWarnMidnite, _setWarnMidnite, None, "Whether to generate warnings for crossing midnite.")
s3Bucket = property(_getS3Bucket, _setS3Bucket, None, doc="Amazon S3 Bucket in which to store data")
encryptCommand = property(_getEncryptCommand, _setEncryptCommand, None, doc="Command used to encrypt data before upload to S3")
+ fullBackupSizeLimit = property(_getFullBackupSizeLimit, _setFullBackupSizeLimit, None,
+ doc="Maximum size of a full backup, in bytes")
+ incrementalBackupSizeLimit = property(_getIncrementalBackupSizeLimit, _setIncrementalBackupSizeLimit, None,
+ doc="Maximum size of an incremental backup, in bytes")
########################################################################
@@ -379,9 +449,11 @@
We add the following fields to the document::
- warnMidnite //cb_config/amazons3/warn_midnite
- s3Bucket //cb_config/amazons3/s3_bucket
- encryptCommand //cb_config/amazons3/encrypt
+ warnMidnite //cb_config/amazons3/warn_midnite
+ s3Bucket //cb_config/amazons3/s3_bucket
+ encryptCommand //cb_config/amazons3/encrypt
+ fullBackupSizeLimit //cb_config/amazons3/full_size_limit
+ incrementalBackupSizeLimit //cb_config/amazons3/incr_size_limit
@param xmlDom: DOM tree as from C{impl.createDocument()}.
@param parentNode: Parent that the section should be appended to.
@@ -391,6 +463,8 @@
addBooleanNode(xmlDom, sectionNode, "warn_midnite", self.amazons3.warnMidnite)
addStringNode(xmlDom, sectionNode, "s3_bucket", self.amazons3.s3Bucket)
addStringNode(xmlDom, sectionNode, "encrypt", self.amazons3.encryptCommand)
+ addLongNode(xmlDom, sectionNode, "full_size_limit", self.amazons3.fullBackupSizeLimit)
+ addLongNode(xmlDom, sectionNode, "incr_size_limit", self.amazons3.incrementalBackupSizeLimit)
def _parseXmlData(self, xmlData):
"""
@@ -414,9 +488,11 @@
We read the following individual fields::
- warnMidnite //cb_config/amazons3/warn_midnite
- s3Bucket //cb_config/amazons3/s3_bucket
- encryptCommand //cb_config/amazons3/encrypt
+ warnMidnite //cb_config/amazons3/warn_midnite
+ s3Bucket //cb_config/amazons3/s3_bucket
+ encryptCommand //cb_config/amazons3/encrypt
+ fullBackupSizeLimit //cb_config/amazons3/full_size_limit
+ incrementalBackupSizeLimit //cb_config/amazons3/incr_size_limit
@param parent: Parent node to search beneath.
@@ -430,6 +506,8 @@
amazons3.warnMidnite = readBoolean(section, "warn_midnite")
amazons3.s3Bucket = readString(section, "s3_bucket")
amazons3.encryptCommand = readString(section, "encrypt")
+ amazons3.fullBackupSizeLimit = readLong(section, "full_size_limit")
+ amazons3.incrementalBackupSizeLimit = readLong(section, "incr_size_limit")
return amazons3
@@ -468,6 +546,7 @@
raise ValueError("Cedar Backup configuration is not properly filled in.")
local = LocalConfig(xmlPath=configPath)
stagingDirs = _findCorrectDailyDir(options, config, local)
+ _applySizeLimits(options, config, local, stagingDirs)
_writeToAmazonS3(config, local, stagingDirs)
_writeStoreIndicator(config, stagingDirs)
logger.info("Executed the amazons3 extended action successfully.")
@@ -534,6 +613,47 @@
##############################
+# _applySizeLimits() function
+##############################
+
+def _applySizeLimits(options, config, local, stagingDirs):
+ """
+ Apply size limits, throwing an exception if any limits are exceeded.
+
+ Size limits are optional. If a limit is set to None, it does not apply.
+ The full size limit applies if the full option is set or if today is the
+ start of the week. The incremental size limit applies otherwise. Limits
+ are applied to the total size of all the relevant staging directories.
+
+ @param options: Options object.
+ @param config: Config object.
+ @param local: Local config object.
+ @param stagingDirs: Dictionary mapping directory path to date suffix.
+
+ @raise ValueError: Under many generic error conditions
+ @raise ValueError: If a size limit has been exceeded
+ """
+ if options.full or isStartOfWeek(config.options.startingDay):
+ logger.debug("Using Amazon S3 size limit for full backups.")
+ limit = local.amazons3.fullBackupSizeLimit
+ else:
+ logger.debug("Using Amazon S3 size limit for incremental backups.")
+ limit = local.amazons3.incrementalBackupSizeLimit
+ if limit is None:
+ logger.debug("No Amazon S3 size limit will be applied.")
+ else:
+ logger.debug("Amazon S3 size limit is: %d bytes" % limit)
+ contents = BackupFileList()
+ for stagingDir in stagingDirs:
+ contents.addDir(stagingDir)
+ total = contents.totalSize()
+ logger.debug("Amazon S3 backup size is: %d bytes" % total)
+ if total > limit:
+ logger.debug("Amazon S3 size limit exceeded: %.0f bytes > %d bytes" % (total, limit))
+ raise ValueError("Amazon S3 size limit exceeded: %.0f bytes > %d bytes" % (total, limit))
+
+
+##############################
# _writeToAmazonS3() function
##############################
Modified: cedar-backup2/trunk/CedarBackup2/xmlutil.py
===================================================================
--- cedar-backup2/trunk/CedarBackup2/xmlutil.py 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/CedarBackup2/xmlutil.py 2015-01-05 20:24:25 UTC (rev 1096)
@@ -246,6 +246,26 @@
else:
return int(result)
+def readLong(parent, name):
+ """
+ Returns long integer contents of the first child with a given name immediately
+ beneath the parent.
+
+ By "immediately beneath" the parent, we mean from among nodes that are
+ direct children of the passed-in parent node.
+
+ @param parent: Parent node to search beneath.
+ @param name: Name of node to search for.
+
+ @return: Long integer contents of node or C{None} if no matching nodes are found.
+ @raise ValueError: If the string at the location can't be converted to an integer.
+ """
+ result = readString(parent, name)
+ if result is None:
+ return None
+ else:
+ return long(result)
+
def readFloat(parent, name):
"""
Returns float contents of the first child with a given name immediately
@@ -353,8 +373,30 @@
if nodeValue is None:
return addStringNode(xmlDom, parentNode, nodeName, None)
else:
- return addStringNode(xmlDom, parentNode, nodeName, "%d" % nodeValue)
+ return addStringNode(xmlDom, parentNode, nodeName, "%d" % nodeValue) # %d works for both int and long
+def addLongNode(xmlDom, parentNode, nodeName, nodeValue):
+ """
+ Adds a text node as the next child of a parent, to contain a long integer.
+
+ If the C{nodeValue} is None, then the node will be created, but will be
+ empty (i.e. will contain no text node child).
+
+ The integer will be converted to a string using "%d". The result will be
+ added to the document via L{addStringNode}.
+
+ @param xmlDom: DOM tree as from C{impl.createDocument()}.
+ @param parentNode: Parent node to create child for.
+ @param nodeName: Name of the new container node.
+ @param nodeValue: The value to put into the node.
+
+ @return: Reference to the newly-created node.
+ """
+ if nodeValue is None:
+ return addStringNode(xmlDom, parentNode, nodeName, None)
+ else:
+ return addStringNode(xmlDom, parentNode, nodeName, "%d" % nodeValue) # %d works for both int and long
+
def addBooleanNode(xmlDom, parentNode, nodeName, nodeValue):
"""
Adds a text node as the next child of a parent, to contain a boolean.
Modified: cedar-backup2/trunk/Changelog
===================================================================
--- cedar-backup2/trunk/Changelog 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/Changelog 2015-01-05 20:24:25 UTC (rev 1096)
@@ -1,3 +1,7 @@
+Version 2.24.2 unreleased
+
+ * Add optional size-limit configuration for amazons3 extension.
+
Version 2.24.1 07 Oct 2014
* Implement a new tool called cback-amazons3-sync.
Modified: cedar-backup2/trunk/manual/src/extensions.xml
===================================================================
--- cedar-backup2/trunk/manual/src/extensions.xml 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/manual/src/extensions.xml 2015-01-05 20:24:25 UTC (rev 1096)
@@ -115,11 +115,28 @@
switches over to the configured backup user to run the
<command>aws</command> program. So, make sure you configure the AWS
CLI tools as the backup user and not root. (This is different than
- the amazons3 sync tool extension, which exceutes AWS CLI command as
+ the amazons3 sync tool extension, which executes AWS CLI command as
the same user that is running the tool.)
</para>
<para>
+ When using physical media via the standard store action, there is an
+ implicit limit to the size of a backup, since a backup must fit on a
+ single disc. Since there is no physical media, no such limit exists
+ for Amazon S3 backups. This leaves open the possibility that Cedar
+ Backup might construct an unexpectedly-large backup that the
+ administrator is not aware of. Over time, this might become
+ expensive, either in terms of network bandwidth or in terms of Amazon
+ S3 storage and I/O charges. To mitigate this risk, set a reasonable
+ maximum size using the configuration elements shown below. If the
+ backup fails, you have a chance to review what made the backup larger
+ than you expected, and you can either correct the problem (i.e. remove
+ a large temporary directory that got inadvertently included in the
+ backup) or change configuration to take into account the new "normal"
+ maximum size.
+ </para>
+
+ <para>
You can optionally configure Cedar Backup to encrypt data before
sending it to S3. To do that, provide a complete command line using
the <literal>${input}</literal> and <literal>${output}</literal>
@@ -251,7 +268,39 @@
</para>
</listitem>
</varlistentry>
+
+ <varlistentry>
+ <term><literal>full_size_limit</literal></term>
+ <listitem>
+ <para>Maximum size of a full backup, in bytes</para>
+ <para>
+ If this field is provided, then a size limit will be applied
+ to full backups. If the total size of the selected staging
+ directory is greater than the limit, then the backup will
+ fail.
+ </para>
+ <para>
+ <emphasis>Restrictions:</emphasis> If provided, must be an integer greater than or equal to zero.
+ </para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term><literal>incr_size_limit</literal></term>
+ <listitem>
+ <para>Maximum size of an incremental backup, in bytes</para>
+ <para>
+ If this field is provided, then a size limit will be applied
+ to incremental backups. If the total size of the selected
+ staging directory is greater than the limit, then the backup
+ will fail.
+ </para>
+ <para>
+ <emphasis>Restrictions:</emphasis> If provided, must be an integer greater than or equal to zero.
+ </para>
+ </listitem>
+ </varlistentry>
+
</variablelist>
</sect1>
Modified: cedar-backup2/trunk/testcase/amazons3tests.py
===================================================================
--- cedar-backup2/trunk/testcase/amazons3tests.py 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/testcase/amazons3tests.py 2015-01-05 20:24:25 UTC (rev 1096)
@@ -9,7 +9,7 @@
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
-# Copyright (c) 2014 Kenneth J. Pronovici.
+# Copyright (c) 2014-2015 Kenneth J. Pronovici.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or
@@ -145,49 +145,40 @@
"""
Test constructor with no values filled in.
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(False, amazons3.warnMidnite)
self.failUnlessEqual(None, amazons3.s3Bucket)
self.failUnlessEqual(None, amazons3.encryptCommand)
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
def testConstructor_002(self):
"""
Test constructor with all values filled in, with valid values.
"""
- amazons3 = AmazonS3Config(True, "bucket", "encrypt")
+ amazons3 = AmazonS3Config(True, "bucket", "encrypt", 1, 2)
self.failUnlessEqual(True, amazons3.warnMidnite)
self.failUnlessEqual("bucket", amazons3.s3Bucket)
self.failUnlessEqual("encrypt", amazons3.encryptCommand)
+ self.failUnlessEqual(1L, amazons3.fullBackupSizeLimit)
+ self.failUnlessEqual(2L, amazons3.incrementalBackupSizeLimit)
def testConstructor_003(self):
"""
- Test assignment of s3Bucket attribute, None value.
- """
- amazons3 = AmazonS3Config(warnMidnite=True, s3Bucket="bucket", encryptCommand="encrypt")
- self.failUnlessEqual(True, amazons3.warnMidnite)
- self.failUnlessEqual("bucket", amazons3.s3Bucket)
- self.failUnlessEqual("encrypt", amazons3.encryptCommand)
- amazons3.s3Bucket = None
- self.failUnlessEqual(True, amazons3.warnMidnite)
- self.failUnlessEqual(None, amazons3.s3Bucket)
- self.failUnlessEqual("encrypt", amazons3.encryptCommand)
-
- def testConstructor_004(self):
- """
Test assignment of warnMidnite attribute, valid value (real boolean).
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(False, amazons3.warnMidnite)
amazons3.warnMidnite = True
self.failUnlessEqual(True, amazons3.warnMidnite)
amazons3.warnMidnite = False
self.failUnlessEqual(False, amazons3.warnMidnite)
- def testConstructor_005(self):
+ def testConstructor_004(self):
"""
Test assignment of warnMidnite attribute, valid value (expression).
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(False, amazons3.warnMidnite)
amazons3.warnMidnite = 0
self.failUnlessEqual(False, amazons3.warnMidnite)
@@ -200,11 +191,20 @@
amazons3.warnMidnite = 3
self.failUnlessEqual(True, amazons3.warnMidnite)
+ def testConstructor_005(self):
+ """
+ Test assignment of s3Bucket attribute, None value.
+ """
+ amazons3 = AmazonS3Config(s3Bucket="bucket")
+ self.failUnlessEqual("bucket", amazons3.s3Bucket)
+ amazons3.s3Bucket = None
+ self.failUnlessEqual(None, amazons3.s3Bucket)
+
def testConstructor_006(self):
"""
Test assignment of s3Bucket attribute, valid value.
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(None, amazons3.s3Bucket)
amazons3.s3Bucket = "bucket"
self.failUnlessEqual("bucket", amazons3.s3Bucket)
@@ -213,30 +213,111 @@
"""
Test assignment of s3Bucket attribute, invalid value (empty).
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(None, amazons3.s3Bucket)
self.failUnlessAssignRaises(ValueError, amazons3, "s3Bucket", "")
self.failUnlessEqual(None, amazons3.s3Bucket)
def testConstructor_008(self):
"""
+ Test assignment of encryptCommand attribute, None value.
+ """
+ amazons3 = AmazonS3Config(encryptCommand="encrypt")
+ self.failUnlessEqual("encrypt", amazons3.encryptCommand)
+ amazons3.encryptCommand = None
+ self.failUnlessEqual(None, amazons3.encryptCommand)
+
+ def testConstructor_009(self):
+ """
Test assignment of encryptCommand attribute, valid value.
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(None, amazons3.encryptCommand)
amazons3.encryptCommand = "encrypt"
self.failUnlessEqual("encrypt", amazons3.encryptCommand)
- def testConstructor_009(self):
+ def testConstructor_010(self):
"""
Test assignment of encryptCommand attribute, invalid value (empty).
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
self.failUnlessEqual(None, amazons3.encryptCommand)
self.failUnlessAssignRaises(ValueError, amazons3, "encryptCommand", "")
self.failUnlessEqual(None, amazons3.encryptCommand)
+ def testConstructor_011(self):
+ """
+ Test assignment of fullBackupSizeLimit attribute, None value.
+ """
+ amazons3 = AmazonS3Config(fullBackupSizeLimit=100)
+ self.failUnlessEqual(100L, amazons3.fullBackupSizeLimit)
+ amazons3.fullBackupSizeLimit = None
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+ def testConstructor_012(self):
+ """
+ Test assignment of fullBackupSizeLimit attribute, valid long value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+ amazons3.fullBackupSizeLimit = 7516192768L
+ self.failUnlessEqual(7516192768L, amazons3.fullBackupSizeLimit)
+
+ def testConstructor_013(self):
+ """
+ Test assignment of fullBackupSizeLimit attribute, valid string value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+ amazons3.fullBackupSizeLimit = "7516192768"
+ self.failUnlessEqual(7516192768L, amazons3.fullBackupSizeLimit)
+
+ def testConstructor_014(self):
+ """
+ Test assignment of fullBackupSizeLimit attribute, invalid value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+ self.failUnlessAssignRaises(ValueError, amazons3, "fullBackupSizeLimit", "xxx")
+ self.failUnlessEqual(None, amazons3.fullBackupSizeLimit)
+
+ def testConstructor_015(self):
+ """
+ Test assignment of incrementalBackupSizeLimit attribute, None value.
+ """
+ amazons3 = AmazonS3Config(incrementalBackupSizeLimit=100)
+ self.failUnlessEqual(100L, amazons3.incrementalBackupSizeLimit)
+ amazons3.incrementalBackupSizeLimit = None
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
+
+ def testConstructor_016(self):
+ """
+ Test assignment of incrementalBackupSizeLimit attribute, valid long value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
+ amazons3.incrementalBackupSizeLimit = 7516192768L
+ self.failUnlessEqual(7516192768L, amazons3.incrementalBackupSizeLimit)
+
+ def testConstructor_017(self):
+ """
+ Test assignment of incrementalBackupSizeLimit attribute, valid string value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
+ amazons3.incrementalBackupSizeLimit = "7516192768"
+ self.failUnlessEqual(7516192768L, amazons3.incrementalBackupSizeLimit)
+
+ def testConstructor_018(self):
+ """
+ Test assignment of incrementalBackupSizeLimit attribute, invalid value.
+ """
+ amazons3 = AmazonS3Config()
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
+ self.failUnlessAssignRaises(ValueError, amazons3, "incrementalBackupSizeLimit", "xxx")
+ self.failUnlessEqual(None, amazons3.incrementalBackupSizeLimit)
+
+
############################
# Test comparison operators
############################
@@ -259,8 +340,8 @@
"""
Test comparison of two identical objects, all attributes non-None.
"""
- amazons31 = AmazonS3Config(True, "bucket", "encrypt")
- amazons32 = AmazonS3Config(True, "bucket", "encrypt")
+ amazons31 = AmazonS3Config(True, "bucket", "encrypt", 1, 2)
+ amazons32 = AmazonS3Config(True, "bucket", "encrypt", 1, 2)
self.failUnlessEqual(amazons31, amazons32)
self.failUnless(amazons31 == amazons32)
self.failUnless(not amazons31 < amazons32)
@@ -301,8 +382,8 @@
"""
Test comparison of two differing objects, s3Bucket differs.
"""
- amazons31 = AmazonS3Config(True, "bucket1", "encrypt")
- amazons32 = AmazonS3Config(True, "bucket2", "encrypt")
+ amazons31 = AmazonS3Config(s3Bucket="bucket1")
+ amazons32 = AmazonS3Config(s3Bucket="bucket2")
self.failIfEqual(amazons31, amazons32)
self.failUnless(not amazons31 == amazons32)
self.failUnless(amazons31 < amazons32)
@@ -329,8 +410,8 @@
"""
Test comparison of two differing objects, encryptCommand differs.
"""
- amazons31 = AmazonS3Config(True, "bucket", "encrypt1")
- amazons32 = AmazonS3Config(True, "bucket", "encrypt2")
+ amazons31 = AmazonS3Config(encryptCommand="encrypt1")
+ amazons32 = AmazonS3Config(encryptCommand="encrypt2")
self.failIfEqual(amazons31, amazons32)
self.failUnless(not amazons31 == amazons32)
self.failUnless(amazons31 < amazons32)
@@ -339,7 +420,63 @@
self.failUnless(not amazons31 >= amazons32)
self.failUnless(amazons31 != amazons32)
+ def testComparison_008(self):
+ """
+ Test comparison of two differing objects, fullBackupSizeLimit differs (one None).
+ """
+ amazons31 = AmazonS3Config()
+ amazons32 = AmazonS3Config(fullBackupSizeLimit=1L)
+ self.failIfEqual(amazons31, amazons32)
+ self.failUnless(not amazons31 == amazons32)
+ self.failUnless(amazons31 < amazons32)
+ self.failUnless(amazons31 <= amazons32)
+ self.failUnless(not amazons31 > amazons32)
+ self.failUnless(not amazons31 >= amazons32)
+ self.failUnless(amazons31 != amazons32)
+ def testComparison_009(self):
+ """
+ Test comparison of two differing objects, fullBackupSizeLimit differs.
+ """
+ amazons31 = AmazonS3Config(fullBackupSizeLimit=1L)
+ amazons32 = AmazonS3Config(fullBackupSizeLimit=2L)
+ self.failIfEqual(amazons31, amazons32)
+ self.failUnless(not amazons31 == amazons32)
+ self.failUnless(amazons31 < amazons32)
+ self.failUnless(amazons31 <= amazons32)
+ self.failUnless(not amazons31 > amazons32)
+ self.failUnless(not amazons31 >= amazons32)
+ self.failUnless(amazons31 != amazons32)
+
+ def testComparison_010(self):
+ """
+ Test comparison of two differing objects, incrementalBackupSizeLimit differs (one None).
+ """
+ amazons31 = AmazonS3Config()
+ amazons32 = AmazonS3Config(incrementalBackupSizeLimit=1L)
+ self.failIfEqual(amazons31, amazons32)
+ self.failUnless(not amazons31 == amazons32)
+ self.failUnless(amazons31 < amazons32)
+ self.failUnless(amazons31 <= amazons32)
+ self.failUnless(not amazons31 > amazons32)
+ self.failUnless(not amazons31 >= amazons32)
+ self.failUnless(amazons31 != amazons32)
+
+ def testComparison_011(self):
+ """
+ Test comparison of two differing objects, incrementalBackupSizeLimit differs.
+ """
+ amazons31 = AmazonS3Config(incrementalBackupSizeLimit=1L)
+ amazons32 = AmazonS3Config(incrementalBackupSizeLimit=2L)
+ self.failIfEqual(amazons31, amazons32)
+ self.failUnless(not amazons31 == amazons32)
+ self.failUnless(amazons31 < amazons32)
+ self.failUnless(amazons31 <= amazons32)
+ self.failUnless(not amazons31 > amazons32)
+ self.failUnless(not amazons31 >= amazons32)
+ self.failUnless(amazons31 != amazons32)
+
+
########################
# TestLocalConfig class
########################
@@ -435,7 +572,7 @@
Test assignment of amazons3 attribute, None value.
"""
config = LocalConfig()
- config.amazons3 = None
+ config.amazons3 = None
self.failUnlessEqual(None, config.amazons3)
def testConstructor_005(self):
@@ -443,7 +580,7 @@
Test assignment of amazons3 attribute, valid value.
"""
config = LocalConfig()
- config.amazons3 = AmazonS3Config()
+ config.amazons3 = AmazonS3Config()
self.failUnlessEqual(AmazonS3Config(), config.amazons3)
def testConstructor_006(self):
@@ -477,10 +614,10 @@
Test comparison of two identical objects, all attributes non-None.
"""
config1 = LocalConfig()
- config1.amazons3 = AmazonS3Config()
+ config1.amazons3 = AmazonS3Config()
config2 = LocalConfig()
- config2.amazons3 = AmazonS3Config()
+ config2.amazons3 = AmazonS3Config()
self.failUnlessEqual(config1, config2)
self.failUnless(config1 == config2)
@@ -496,7 +633,7 @@
"""
config1 = LocalConfig()
config2 = LocalConfig()
- config2.amazons3 = AmazonS3Config()
+ config2.amazons3 = AmazonS3Config()
self.failIfEqual(config1, config2)
self.failUnless(not config1 == config2)
self.failUnless(config1 < config2)
@@ -510,10 +647,10 @@
Test comparison of two differing objects, s3Bucket differs.
"""
config1 = LocalConfig()
- config1.amazons3 = AmazonS3Config(True, "bucket1", "encrypt")
+ config1.amazons3 = AmazonS3Config(True, "bucket1", "encrypt", 1, 2)
config2 = LocalConfig()
- config2.amazons3 = AmazonS3Config(True, "bucket2", "encrypt")
+ config2.amazons3 = AmazonS3Config(True, "bucket2", "encrypt", 1, 2)
self.failIfEqual(config1, config2)
self.failUnless(not config1 == config2)
@@ -533,7 +670,7 @@
Test validate on a None amazons3 section.
"""
config = LocalConfig()
- config.amazons3 = None
+ config.amazons3 = None
self.failUnlessRaises(ValueError, config.validate)
def testValidate_002(self):
@@ -541,7 +678,7 @@
Test validate on an empty amazons3 section.
"""
config = LocalConfig()
- config.amazons3 = AmazonS3Config()
+ config.amazons3 = AmazonS3Config()
self.failUnlessRaises(ValueError, config.validate)
def testValidate_003(self):
@@ -549,7 +686,7 @@
Test validate on a non-empty amazons3 section with no values filled in.
"""
config = LocalConfig()
- config.amazons3 = AmazonS3Config(None)
+ config.amazons3 = AmazonS3Config(None)
self.failUnlessRaises(ValueError, config.validate)
def testValidate_005(self):
@@ -557,7 +694,7 @@
Test validate on a non-empty amazons3 section with valid values filled in.
"""
config = LocalConfig()
- config.amazons3 = AmazonS3Config(True, "bucket")
+ config.amazons3 = AmazonS3Config(True, "bucket")
config.validate()
@@ -589,11 +726,15 @@
self.failUnlessEqual(True, config.amazons3.warnMidnite)
self.failUnlessEqual("mybucket", config.amazons3.s3Bucket)
self.failUnlessEqual("encrypt", config.amazons3.encryptCommand)
+ self.failUnlessEqual(5368709120L, config.amazons3.fullBackupSizeLimit)
+ self.failUnlessEqual(2147483648, config.amazons3.incrementalBackupSizeLimit)
config = LocalConfig(xmlData=contents, validate=False)
self.failIfEqual(None, config.amazons3)
self.failUnlessEqual(True, config.amazons3.warnMidnite)
self.failUnlessEqual("mybucket", config.amazons3.s3Bucket)
self.failUnlessEqual("encrypt", config.amazons3.encryptCommand)
+ self.failUnlessEqual(5368709120L, config.amazons3.fullBackupSizeLimit)
+ self.failUnlessEqual(2147483648, config.amazons3.incrementalBackupSizeLimit)
###################
@@ -604,18 +745,18 @@
"""
Test with empty config document.
"""
- amazons3 = AmazonS3Config()
+ amazons3 = AmazonS3Config()
config = LocalConfig()
- config.amazons3 = amazons3
+ config.amazons3 = amazons3
self.validateAddConfig(config)
def testAddConfig_002(self):
"""
Test with values set.
"""
- amazons3 = AmazonS3Config(True, "bucket", "encrypt")
+ amazons3 = AmazonS3Config(True, "bucket", "encrypt", 1, 2)
config = LocalConfig()
- config.amazons3 = amazons3
+ config.amazons3 = amazons3
self.validateAddConfig(config)
Modified: cedar-backup2/trunk/testcase/data/amazons3.conf.2
===================================================================
--- cedar-backup2/trunk/testcase/data/amazons3.conf.2 2014-11-17 21:51:30 UTC (rev 1095)
+++ cedar-backup2/trunk/testcase/data/amazons3.conf.2 2015-01-05 20:24:25 UTC (rev 1096)
@@ -5,5 +5,7 @@
<warn_midnite>Y</warn_midnite>
<s3_bucket>mybucket</s3_bucket>
<encrypt>encrypt</encrypt>
+ <full_size_limit>5368709120</full_size_limit> <!-- 5 GB -->
+ <incr_size_limit>2147483648</incr_size_limit> <!-- 2 GB -->
</amazons3>
</cb_config>
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|