Commit a65e94c1 authored by Collin Winter

Convert raise statements in bsddb.

parent 865ea892
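The rewrite is mechanical: Python 2's statement syntax "raise Exc, value" becomes the call form "raise Exc(value)", and the three-argument form "raise Exc, value, traceback" becomes "raise Exc(value).with_traceback(traceback)" (see the @@ -419,7 hunk in dbtables below). A minimal, self-contained sketch of both rewrites follows; the TableDBError class in it is a stand-in defined only so the sketch runs, not the real bsddb exception.

    import sys

    class TableDBError(Exception):
        """Stand-in for bsddb's TableDBError; defined here only so the sketch runs."""

    def lookup(table):
        # Old form: raise TableDBError, "unknown table: %r" % (table,)
        raise TableDBError("unknown table: %r" % (table,))

    def relookup(table):
        try:
            lookup(table)
        except TableDBError as dberror:
            info = sys.exc_info()
            # Old three-argument form: raise TableDBError, dberror.args[0], info[2]
            # New form keeps the original traceback attached to the new exception:
            raise TableDBError(dberror.args[0]).with_traceback(info[2])

    if __name__ == "__main__":
        try:
            relookup("spam")
        except TableDBError as exc:
            print("re-raised:", exc)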
@@ -199,7 +199,7 @@ class _DBWithCursor(_iter_mixin):
     def _checkOpen(self):
         if self.db is None:
-            raise error, "BSDDB object has already been closed"
+            raise error("BSDDB object has already been closed")
     def isOpen(self):
         return self.db is not None
@@ -483,7 +483,7 @@ def _openDBEnv(cachesize):
         if cachesize >= 20480:
             e.set_cachesize(0, cachesize)
         else:
-            raise error, "cachesize must be >= 20480"
+            raise error("cachesize must be >= 20480")
     e.set_lk_detect(db.DB_LOCK_DEFAULT)
     e.open('.', db.DB_PRIVATE | db.DB_CREATE | db.DB_THREAD | db.DB_INIT_LOCK | db.DB_INIT_MPOOL)
     return e
@@ -505,7 +505,7 @@ def _checkflag(flag, file):
         if file is not None and os.path.isfile(file):
             os.unlink(file)
     else:
-        raise error, "flags should be one of 'r', 'w', 'c' or 'n', not "+repr(flag)
+        raise error("flags should be one of 'r', 'w', 'c' or 'n', not "+repr(flag))
     return flags | db.DB_THREAD
 #----------------------------------------------------------------------
...
@@ -48,4 +48,4 @@ else:
     from _bsddb import __version__
 if version() < (3, 2, 0):
-    raise ImportError, "correct BerkeleyDB symbols not found. Perhaps python was statically linked with an older version?"
+    raise ImportError("correct BerkeleyDB symbols not found. Perhaps python was statically linked with an older version?")
@@ -46,12 +46,12 @@ class DBRecIO:
     def isatty(self):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         return 0
     def seek(self, pos, mode = 0):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         if mode == 1:
             pos = pos + self.pos
         elif mode == 2:
@@ -60,12 +60,12 @@ class DBRecIO:
     def tell(self):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         return self.pos
     def read(self, n = -1):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         if n < 0:
             newpos = self.len
         else:
@@ -111,7 +111,7 @@ class DBRecIO:
     def truncate(self, size=None):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         if size is None:
             size = self.pos
         elif size < 0:
@@ -123,7 +123,7 @@ class DBRecIO:
     def write(self, s):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
         if not s: return
         if self.pos > self.len:
             self.buflist.append('\0'*(self.pos - self.len))
@@ -137,7 +137,7 @@ class DBRecIO:
     def flush(self):
         if self.closed:
-            raise ValueError, "I/O operation on closed file"
+            raise ValueError("I/O operation on closed file")
 """
...
@@ -69,7 +69,7 @@ def open(filename, flags=db.DB_CREATE, mode=0o660, filetype=db.DB_HASH,
     elif sflag == 'n':
         flags = db.DB_TRUNCATE | db.DB_CREATE
     else:
-        raise db.DBError, "flags should be one of 'r', 'w', 'c' or 'n' or use the bsddb.db.DB_* flags"
+        raise db.DBError("flags should be one of 'r', 'w', 'c' or 'n' or use the bsddb.db.DB_* flags")
     d = DBShelf(dbenv)
     d.open(filename, dbname, filetype, flags, mode)
@@ -155,7 +155,7 @@ class DBShelf(DictMixin):
         if self.get_type() != db.DB_RECNO:
             self.append = self.__append
             return self.append(value, txn=txn)
-        raise db.DBError, "append() only supported when dbshelve opened with filetype=dbshelve.db.DB_RECNO"
+        raise db.DBError("append() only supported when dbshelve opened with filetype=dbshelve.db.DB_RECNO")
     def associate(self, secondaryDB, callback, flags=0):
...
@@ -244,7 +244,7 @@ class bsdTableDB :
             columnlist_key = _columns_key(table)
             if self.db.has_key(columnlist_key):
-                raise TableAlreadyExists, "table already exists"
+                raise TableAlreadyExists("table already exists")
             txn = self.env.txn_begin()
             # store the table's column info
@@ -261,7 +261,7 @@ class bsdTableDB :
             txn.commit()
             txn = None
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
         finally:
             if txn:
                 txn.abort()
@@ -273,7 +273,7 @@ class bsdTableDB :
         """
         assert isinstance(table, str)
         if contains_metastrings(table):
-            raise ValueError, "bad table name: contains reserved metastrings"
+            raise ValueError("bad table name: contains reserved metastrings")
         columnlist_key = _columns_key(table)
         if not self.db.has_key(columnlist_key):
@@ -340,7 +340,7 @@ class bsdTableDB :
                 self.__load_column_info(table)
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
         finally:
             if txn:
                 txn.abort()
@@ -352,9 +352,9 @@ class bsdTableDB :
         try:
             tcolpickles = self.db.get(_columns_key(table))
         except DBNotFoundError:
-            raise TableDBError, "unknown table: %r" % (table,)
+            raise TableDBError("unknown table: %r" % (table,))
         if not tcolpickles:
-            raise TableDBError, "unknown table: %r" % (table,)
+            raise TableDBError("unknown table: %r" % (table,))
         self.__tablecolumns[table] = pickle.loads(tcolpickles)
     def __new_rowid(self, table, txn) :
@@ -388,14 +388,14 @@ class bsdTableDB :
         txn = None
         try:
             if not self.db.has_key(_columns_key(table)):
-                raise TableDBError, "unknown table"
+                raise TableDBError("unknown table")
             # check the validity of each column name
             if table not in self.__tablecolumns:
                 self.__load_column_info(table)
             for column in rowdict.keys() :
                 if not self.__tablecolumns[table].count(column):
-                    raise TableDBError, "unknown column: %r" % (column,)
+                    raise TableDBError("unknown column: %r" % (column,))
             # get a unique row identifier for this row
             txn = self.env.txn_begin()
@@ -419,7 +419,7 @@ class bsdTableDB :
                 txn.abort()
                 self.db.delete(_rowid_key(table, rowid))
             txn = None
-            raise TableDBError, dberror.args[1], info[2]
+            raise TableDBError(dberror.args[1]).with_traceback(info[2])
         finally:
             if txn:
                 txn.abort()
@@ -473,7 +473,7 @@ class bsdTableDB :
                     txn.abort()
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
     def Delete(self, table, conditions={}):
         """Delete(table, conditions) - Delete items matching the given
@@ -513,7 +513,7 @@ class bsdTableDB :
                 if txn:
                     txn.abort()
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
     def Select(self, table, columns, conditions={}):
@@ -533,7 +533,7 @@ class bsdTableDB :
                 columns = self.__tablecolumns[table]
             matching_rowids = self.__Select(table, columns, conditions)
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
         # return the matches as a list of dictionaries
         return matching_rowids.values()
@@ -554,7 +554,7 @@ class bsdTableDB :
             columns = self.tablecolumns[table]
         for column in (columns + list(conditions.keys())):
             if not self.__tablecolumns[table].count(column):
-                raise TableDBError, "unknown column: %r" % (column,)
+                raise TableDBError("unknown column: %r" % (column,))
         # keyed on rows that match so far, containings dicts keyed on
         # column names containing the data for that row and column.
@@ -708,7 +708,7 @@ class bsdTableDB :
                 del self.__tablecolumns[table]
         except DBError as dberror:
-            raise TableDBError, dberror.args[1]
+            raise TableDBError(dberror.args[1])
         finally:
             if txn:
                 txn.abort()
@@ -735,7 +735,7 @@ class BasicTransactionTestCase(BasicTestCase):
         except db.DBError as e:
             pass
         else:
-            raise RuntimeError, "DBTxn.abort() called after DB_TXN no longer valid w/o an exception"
+            raise RuntimeError("DBTxn.abort() called after DB_TXN no longer valid w/o an exception")
         txn = self.env.txn_begin()
         txn.commit()
@@ -744,7 +744,7 @@ class BasicTransactionTestCase(BasicTestCase):
         except db.DBError as e:
             pass
         else:
-            raise RuntimeError, "DBTxn.commit() called after DB_TXN no longer valid w/o an exception"
+            raise RuntimeError("DBTxn.commit() called after DB_TXN no longer valid w/o an exception")
 class BTreeTransactionTestCase(BasicTransactionTestCase):
...
@@ -192,7 +192,7 @@ class BtreeExceptionsTestCase (AbstractBtreeKeyCompareTestCase):
             if l == r:
                 # pass the set_bt_compare test
                 return 0
-            raise RuntimeError, "i'm a naughty comparison function"
+            raise RuntimeError("i'm a naughty comparison function")
         self.createDB (bad_comparator)
         #print "\n*** test should print 2 uncatchable tracebacks ***"
         self.addDataToDB (['a', 'b', 'c']) # this should raise, but...
...
@@ -355,7 +355,7 @@ class TableDBTestCase(unittest.TestCase):
             # success, the string value in mappings isn't callable
             pass
         else:
-            raise RuntimeError, "why was TypeError not raised for bad callable?"
+            raise RuntimeError("why was TypeError not raised for bad callable?")
         # Delete key in select conditions
         values = self.tdb.Select(
...
@@ -48,9 +48,9 @@ class pickleTestCase(unittest.TestCase):
             #print repr(pickledEgg)
             rottenEgg = pickle.loads(pickledEgg)
             if rottenEgg.args != egg.args or type(rottenEgg) != type(egg):
-                raise Exception, (rottenEgg, '!=', egg)
+                raise Exception(rottenEgg, '!=', egg)
         else:
-            raise Exception, "where's my DBError exception?!?"
+            raise Exception("where's my DBError exception?!?")
         self.db.close()
         self.env.close()
...