Commit 9ce23844 authored by Collin Winter

Raise statement normalization in Lib/.

parent 26261c75
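
For context: the commit mechanically rewrites the legacy comma form of the raise statement (`raise ExcClass, arg`) into the call form (`raise ExcClass(arg)`), which is the only spelling Python 3 accepts; for a plain message argument the two are equivalent under Python 2. A minimal sketch of the pattern, mirroring the RawConfigParser hunk below (the helper names are illustrative, not part of the diff):

```python
_BOOLEAN_STATES = {'1': True, 'yes': True, '0': False, 'no': False}

def to_boolean(value):
    if value.lower() not in _BOOLEAN_STATES:
        # Old spelling (removed by this commit):
        #     raise ValueError, 'Not a boolean: %s' % value
        # Normalized spelling (valid in Python 2 and 3):
        raise ValueError('Not a boolean: %s' % value)
    return _BOOLEAN_STATES[value.lower()]

try:
    to_boolean('maybe')
except ValueError as exc:
    print(exc)  # Not a boolean: maybe
```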
......@@ -344,7 +344,7 @@ class RawConfigParser:
def getboolean(self, section, option):
v = self.get(section, option)
if v.lower() not in self._boolean_states:
raise ValueError, 'Not a boolean: %s' % v
raise ValueError('Not a boolean: %s' % v)
return self._boolean_states[v.lower()]
def optionxform(self, optionstr):
......
......@@ -127,8 +127,8 @@ class DictMixin:
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError, "pop expected at most 2 arguments, got "\
+ repr(1 + len(args))
raise TypeError("pop expected at most 2 arguments, got "
+ repr(1 + len(args)))
try:
value = self[key]
except KeyError:
......@@ -141,7 +141,7 @@ class DictMixin:
try:
k, v = next(self.iteritems())
except StopIteration:
raise KeyError, 'container is empty'
raise KeyError('container is empty')
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
......
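
A side benefit visible in the DictMixin.pop hunk above: once the message is an argument to a call, the expression can wrap across lines inside the parentheses, so the backslash continuation required by the old statement disappears. A small runnable sketch under assumed names (not the actual UserDict code):

```python
def pop_sketch(mapping, key, *args):
    if len(args) > 1:
        # The message continues inside the call's parentheses;
        # no trailing backslash is needed.
        raise TypeError("pop expected at most 2 arguments, got "
                        + repr(1 + len(args)))
    try:
        value = mapping[key]
    except KeyError:
        if args:
            return args[0]
        raise
    del mapping[key]
    return value

print(pop_sketch({'a': 1}, 'a'))     # 1
print(pop_sketch({}, 'missing', 0))  # 0
```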
......@@ -183,7 +183,7 @@ class MutableString(UserString):
def __init__(self, string=""):
self.data = string
def __hash__(self):
raise TypeError, "unhashable type (it is mutable)"
raise TypeError("unhashable type (it is mutable)")
def __setitem__(self, index, sub):
if isinstance(index, slice):
if isinstance(sub, UserString):
......
......@@ -287,14 +287,14 @@ class Aifc_read:
self._soundpos = 0
self._file = Chunk(file)
if self._file.getname() != 'FORM':
raise Error, 'file does not start with FORM id'
raise Error('file does not start with FORM id')
formdata = self._file.read(4)
if formdata == 'AIFF':
self._aifc = 0
elif formdata == 'AIFC':
self._aifc = 1
else:
raise Error, 'not an AIFF or AIFF-C file'
raise Error('not an AIFF or AIFF-C file')
self._comm_chunk_read = 0
while 1:
self._ssnd_seek_needed = 1
......@@ -317,10 +317,10 @@ class Aifc_read:
elif chunkname in _skiplist:
pass
else:
raise Error, 'unrecognized chunk type '+chunk.chunkname
raise Error('unrecognized chunk type '+chunk.chunkname)
chunk.skip()
if not self._comm_chunk_read or not self._ssnd_chunk:
raise Error, 'COMM chunk and/or SSND chunk missing'
raise Error('COMM chunk and/or SSND chunk missing')
if self._aifc and self._decomp:
import cl
params = [cl.ORIGINAL_FORMAT, 0,
......@@ -331,7 +331,7 @@ class Aifc_read:
elif self._nchannels == 2:
params[1] = cl.STEREO_INTERLEAVED
else:
raise Error, 'cannot compress more than 2 channels'
raise Error('cannot compress more than 2 channels')
self._decomp.SetParams(params)
def __init__(self, f):
......@@ -394,11 +394,11 @@ class Aifc_read:
for marker in self._markers:
if id == marker[0]:
return marker
raise Error, 'marker %r does not exist' % (id,)
raise Error('marker %r does not exist' % (id,))
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error, 'position not in range'
raise Error('position not in range')
self._soundpos = pos
self._ssnd_seek_needed = 1
......@@ -488,7 +488,7 @@ class Aifc_read:
return
except ImportError:
pass
raise Error, 'cannot read compressed AIFF-C files'
raise Error('cannot read compressed AIFF-C files')
if self._comptype == 'ULAW':
scheme = cl.G711_ULAW
self._framesize = self._framesize / 2
......@@ -496,7 +496,7 @@ class Aifc_read:
scheme = cl.G711_ALAW
self._framesize = self._framesize / 2
else:
raise Error, 'unsupported compression type'
raise Error('unsupported compression type')
self._decomp = cl.OpenDecompressor(scheme)
self._convert = self._decomp_data
else:
......@@ -594,53 +594,53 @@ class Aifc_write:
#
def aiff(self):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
self._aifc = 0
def aifc(self):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
self._aifc = 1
def setnchannels(self, nchannels):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
if nchannels < 1:
raise Error, 'bad # of channels'
raise Error('bad # of channels')
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error, 'number of channels not set'
raise Error('number of channels not set')
return self._nchannels
def setsampwidth(self, sampwidth):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
if sampwidth < 1 or sampwidth > 4:
raise Error, 'bad sample width'
raise Error('bad sample width')
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error, 'sample width not set'
raise Error('sample width not set')
return self._sampwidth
def setframerate(self, framerate):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
if framerate <= 0:
raise Error, 'bad frame rate'
raise Error('bad frame rate')
self._framerate = framerate
def getframerate(self):
if not self._framerate:
raise Error, 'frame rate not set'
raise Error('frame rate not set')
return self._framerate
def setnframes(self, nframes):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
self._nframes = nframes
def getnframes(self):
......@@ -648,9 +648,9 @@ class Aifc_write:
def setcomptype(self, comptype, compname):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
raise Error, 'unsupported compression type'
raise Error('unsupported compression type')
self._comptype = comptype
self._compname = compname
......@@ -668,9 +668,9 @@ class Aifc_write:
def setparams(self, params):
nchannels, sampwidth, framerate, nframes, comptype, compname = params
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
raise Error('cannot change parameters after starting to write')
if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
raise Error, 'unsupported compression type'
raise Error('unsupported compression type')
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
......@@ -679,17 +679,17 @@ class Aifc_write:
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error, 'not all parameters set'
raise Error('not all parameters set')
return self._nchannels, self._sampwidth, self._framerate, \
self._nframes, self._comptype, self._compname
def setmark(self, id, pos, name):
if id <= 0:
raise Error, 'marker ID must be > 0'
raise Error('marker ID must be > 0')
if pos < 0:
raise Error, 'marker position must be >= 0'
raise Error('marker position must be >= 0')
if type(name) != type(''):
raise Error, 'marker name must be a string'
raise Error('marker name must be a string')
for i in range(len(self._markers)):
if id == self._markers[i][0]:
self._markers[i] = id, pos, name
......@@ -700,7 +700,7 @@ class Aifc_write:
for marker in self._markers:
if id == marker[0]:
return marker
raise Error, 'marker %r does not exist' % (id,)
raise Error('marker %r does not exist' % (id,))
def getmarkers(self):
if len(self._markers) == 0:
......@@ -770,18 +770,18 @@ class Aifc_write:
if not self._sampwidth:
self._sampwidth = 2
if self._sampwidth != 2:
raise Error, 'sample width must be 2 when compressing with ULAW or ALAW'
raise Error('sample width must be 2 when compressing with ULAW or ALAW')
if self._comptype == 'G722':
if not self._sampwidth:
self._sampwidth = 2
if self._sampwidth != 2:
raise Error, 'sample width must be 2 when compressing with G7.22 (ADPCM)'
raise Error('sample width must be 2 when compressing with G7.22 (ADPCM)')
if not self._nchannels:
raise Error, '# channels not specified'
raise Error('# channels not specified')
if not self._sampwidth:
raise Error, 'sample width not specified'
raise Error('sample width not specified')
if not self._framerate:
raise Error, 'sampling rate not specified'
raise Error('sampling rate not specified')
self._write_header(datasize)
def _init_compression(self):
......@@ -798,13 +798,13 @@ class Aifc_write:
return
except ImportError:
pass
raise Error, 'cannot write compressed AIFF-C files'
raise Error('cannot write compressed AIFF-C files')
if self._comptype == 'ULAW':
scheme = cl.G711_ULAW
elif self._comptype == 'ALAW':
scheme = cl.G711_ALAW
else:
raise Error, 'unsupported compression type'
raise Error('unsupported compression type')
self._comp = cl.OpenCompressor(scheme)
params = [cl.ORIGINAL_FORMAT, 0,
cl.BITS_PER_COMPONENT, self._sampwidth * 8,
......@@ -816,7 +816,7 @@ class Aifc_write:
elif self._nchannels == 2:
params[1] = cl.STEREO_INTERLEAVED
else:
raise Error, 'cannot compress more than 2 channels'
raise Error('cannot compress more than 2 channels')
self._comp.SetParams(params)
# the compressor produces a header which we ignore
dummy = self._comp.Compress(0, '')
......@@ -930,7 +930,7 @@ def open(f, mode=None):
elif mode in ('w', 'wb'):
return Aifc_write(f)
else:
raise Error, "mode must be 'r', 'rb', 'w', or 'wb'"
raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
openfp = open # B/W compatibility
......
......@@ -59,7 +59,7 @@ for _name in _names:
_errors.append(_mod.error)
if not _defaultmod:
raise ImportError, "no dbm clone found; tried %s" % _names
raise ImportError("no dbm clone found; tried %s" % _names)
error = tuple(_errors)
......@@ -74,10 +74,10 @@ def open(file, flag = 'r', mode = 0o666):
# flag was used so use default type
mod = _defaultmod
else:
raise error, "need 'c' or 'n' flag to open new db"
raise error("need 'c' or 'n' flag to open new db")
elif result == "":
# db type cannot be determined
raise error, "db type could not be determined"
raise error("db type could not be determined")
else:
mod = __import__(result)
return mod.open(file, flag, mode)
......@@ -66,10 +66,10 @@ class async_chat (asyncore.dispatcher):
asyncore.dispatcher.__init__ (self, conn)
def collect_incoming_data(self, data):
raise NotImplementedError, "must be implemented in subclass"
raise NotImplementedError("must be implemented in subclass")
def found_terminator(self):
raise NotImplementedError, "must be implemented in subclass"
raise NotImplementedError("must be implemented in subclass")
def set_terminator (self, term):
"Set the input delimiter. Can be a fixed string of any length, an integer, or None"
......
......@@ -313,7 +313,7 @@ class dispatcher:
self.connected = True
self.handle_connect()
else:
raise socket.error, (err, errorcode[err])
raise socket.error(err, errorcode[err])
def accept(self):
# XXX can return either an address pair or None
......
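
The asyncore hunk above shows the one case where the rewrite is not a pure textual wrap: under the legacy statement, a tuple after the comma is used as the constructor's argument list, so `raise socket.error, (err, errorcode[err])` corresponds to `socket.error(err, errorcode[err])` with two positional arguments, not `socket.error((err, errorcode[err]))`. A sketch of the equivalence (exact values are platform-dependent):

```python
import socket
from errno import errorcode, ECONNREFUSED

err = ECONNREFUSED
try:
    # Call form with two positional arguments, matching the old
    # tuple form `raise socket.error, (err, errorcode[err])`.
    raise socket.error(err, errorcode[err])
except socket.error as exc:
    print(exc.args)  # e.g. (111, 'ECONNREFUSED') on Linux
```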
......@@ -130,7 +130,7 @@ class Bdb:
return False
def do_clear(self, arg):
raise NotImplementedError, "subclass of bdb must implement do_clear()"
raise NotImplementedError("subclass of bdb must implement do_clear()")
def break_anywhere(self, frame):
return self.canonic(frame.f_code.co_filename) in self.breaks
......
......@@ -140,7 +140,7 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
elif ctype == 'application/x-www-form-urlencoded':
clength = int(environ['CONTENT_LENGTH'])
if maxlen and clength > maxlen:
raise ValueError, 'Maximum content length exceeded'
raise ValueError('Maximum content length exceeded')
qs = fp.read(clength)
else:
qs = '' # Unknown content-type
......@@ -215,7 +215,7 @@ def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
nv = name_value.split('=', 1)
if len(nv) != 2:
if strict_parsing:
raise ValueError, "bad query field: %r" % (name_value,)
raise ValueError("bad query field: %r" % (name_value,))
# Handle case of a control-name with no equal sign
if keep_blank_values:
nv.append('')
......@@ -258,7 +258,7 @@ def parse_multipart(fp, pdict):
if 'boundary' in pdict:
boundary = pdict['boundary']
if not valid_boundary(boundary):
raise ValueError, ('Invalid boundary in multipart form: %r'
raise ValueError('Invalid boundary in multipart form: %r'
% (boundary,))
nextpart = "--" + boundary
......@@ -280,7 +280,7 @@ def parse_multipart(fp, pdict):
pass
if bytes > 0:
if maxlen and bytes > maxlen:
raise ValueError, 'Maximum content length exceeded'
raise ValueError('Maximum content length exceeded')
data = fp.read(bytes)
else:
data = ""
......@@ -520,7 +520,7 @@ class FieldStorage:
except ValueError:
pass
if maxlen and clen > maxlen:
raise ValueError, 'Maximum content length exceeded'
raise ValueError('Maximum content length exceeded')
self.length = clen
self.list = self.file = None
......@@ -542,7 +542,7 @@ class FieldStorage:
def __getattr__(self, name):
if name != 'value':
raise AttributeError, name
raise AttributeError(name)
if self.file:
self.file.seek(0)
value = self.file.read()
......@@ -556,12 +556,12 @@ class FieldStorage:
def __getitem__(self, key):
"""Dictionary style indexing."""
if self.list is None:
raise TypeError, "not indexable"
raise TypeError("not indexable")
found = []
for item in self.list:
if item.name == key: found.append(item)
if not found:
raise KeyError, key
raise KeyError(key)
if len(found) == 1:
return found[0]
else:
......@@ -603,7 +603,7 @@ class FieldStorage:
def keys(self):
"""Dictionary style keys() method."""
if self.list is None:
raise TypeError, "not indexable"
raise TypeError("not indexable")
keys = []
for item in self.list:
if item.name not in keys: keys.append(item.name)
......@@ -612,7 +612,7 @@ class FieldStorage:
def __contains__(self, key):
"""Dictionary style __contains__ method."""
if self.list is None:
raise TypeError, "not indexable"
raise TypeError("not indexable")
for item in self.list:
if item.name == key: return True
return False
......@@ -636,7 +636,7 @@ class FieldStorage:
"""Internal: read a part that is itself multipart."""
ib = self.innerboundary
if not valid_boundary(ib):
raise ValueError, 'Invalid boundary in multipart form: %r' % (ib,)
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
self.list = []
klass = self.FieldStorageClass or self.__class__
part = klass(self.fp, {}, ib,
......@@ -817,7 +817,7 @@ class SvFormContentDict(FormContentDict):
"""
def __getitem__(self, key):
if len(self.dict[key]) > 1:
raise IndexError, 'expecting a single value'
raise IndexError('expecting a single value')
return self.dict[key][0]
def getlist(self, key):
return self.dict[key]
......
......@@ -90,7 +90,7 @@ class Chunk:
def isatty(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
raise ValueError("I/O operation on closed file")
return False
def seek(self, pos, whence=0):
......@@ -100,9 +100,9 @@ class Chunk:
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
raise ValueError("I/O operation on closed file")
if not self.seekable:
raise IOError, "cannot seek"
raise IOError("cannot seek")
if whence == 1:
pos = pos + self.size_read
elif whence == 2:
......@@ -114,7 +114,7 @@ class Chunk:
def tell(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
raise ValueError("I/O operation on closed file")
return self.size_read
def read(self, size=-1):
......@@ -124,7 +124,7 @@ class Chunk:
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
raise ValueError("I/O operation on closed file")
if self.size_read >= self.chunksize:
return ''
if size < 0:
......@@ -148,7 +148,7 @@ class Chunk:
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
raise ValueError("I/O operation on closed file")
if self.seekable:
try:
n = self.chunksize - self.size_read
......
......@@ -358,8 +358,8 @@ class Cmd:
nonstrings = [i for i in range(len(list))
if not isinstance(list[i], basestring)]
if nonstrings:
raise TypeError, ("list[i] not a string for i in %s" %
", ".join(map(str, nonstrings)))
raise TypeError("list[i] not a string for i in %s"
% ", ".join(map(str, nonstrings)))
size = len(list)
if size == 1:
self.stdout.write('%s\n'%str(list[0]))
......
......@@ -96,7 +96,7 @@ def _maybe_compile(compiler, source, filename, symbol):
if code:
return code
if not code1 and repr(err1) == repr(err2):
raise SyntaxError, err1
raise SyntaxError(err1)
def _compile(source, filename, symbol):
return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)
......
......@@ -62,7 +62,7 @@ def _reduce_ex(self, proto):
state = None
else:
if base is self.__class__:
raise TypeError, "can't pickle %s objects" % base.__name__
raise TypeError("can't pickle %s objects" % base.__name__)
state = base(self)
args = (self.__class__, base, state)
try:
......@@ -153,7 +153,7 @@ def add_extension(module, name, code):
"""Register an extension code."""
code = int(code)
if not 1 <= code <= 0x7fffffff:
raise ValueError, "code out of range"
raise ValueError("code out of range")
key = (module, name)
if (_extension_registry.get(key) == code and
_inverted_registry.get(code) == key):
......
......@@ -104,9 +104,8 @@ class DictWriter:
self.fieldnames = fieldnames # list of keys for the dict
self.restval = restval # for writing short dicts
if extrasaction.lower() not in ("raise", "ignore"):
raise ValueError, \
("extrasaction (%s) must be 'raise' or 'ignore'" %
extrasaction)
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
% extrasaction)
self.extrasaction = extrasaction
self.writer = writer(f, dialect, *args, **kwds)
......@@ -114,8 +113,8 @@ class DictWriter:
if self.extrasaction == "raise":
wrong_fields = [k for k in rowdict if k not in self.fieldnames]
if wrong_fields:
raise ValueError("dict contains fields not in fieldnames: " +
", ".join(wrong_fields))
raise ValueError("dict contains fields not in fieldnames: "
+ ", ".join(wrong_fields))
return [rowdict.get(key, self.restval) for key in self.fieldnames]
def writerow(self, rowdict):
......@@ -155,7 +154,7 @@ class Sniffer:
delimiters)
if not delimiter:
raise Error, "Could not determine delimiter"
raise Error("Could not determine delimiter")
class dialect(Dialect):
_name = "sniffed"
......
......@@ -2350,7 +2350,7 @@ class Context(object):
# Errors should only be risked on copies of the context
# self._ignored_flags = []
raise error, explanation
raise error(explanation)
def _ignore_all_flags(self):
"""Ignore all flags, if they are raised"""
......
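
The decimal hunk above also covers the case where the exception class is held in a variable (`error` here): the call form simply calls whatever class object the name is bound to at run time. A simplified, hypothetical sketch (not decimal's real signal machinery):

```python
class DivisionByZero(ArithmeticError):
    """Stand-in for one of decimal's signal classes."""

def _raise_error(condition, explanation):
    # `condition` is whichever exception class the caller passed in;
    # calling it builds the instance that gets raised.
    raise condition(explanation)

try:
    _raise_error(DivisionByZero, 'x / 0')
except DivisionByZero as exc:
    print(exc)  # x / 0
```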
......@@ -914,7 +914,7 @@ class Differ:
elif tag == 'equal':
g = self._dump(' ', a, alo, ahi)
else:
raise ValueError, 'unknown tag %r' % (tag,)
raise ValueError('unknown tag %r' % (tag,))
for line in g:
yield line
......@@ -1026,7 +1026,7 @@ class Differ:
atags += ' ' * la
btags += ' ' * lb
else:
raise ValueError, 'unknown tag %r' % (tag,)
raise ValueError('unknown tag %r' % (tag,))
for line in self._qformat(aelt, belt, atags, btags):
yield line
else:
......@@ -2005,7 +2005,7 @@ def restore(delta, which):
try:
tag = {1: "- ", 2: "+ "}[int(which)]
except KeyError:
raise ValueError, ('unknown delta choice (must be 1 or 2): %r'
raise ValueError('unknown delta choice (must be 1 or 2): %r'
% which)
prefixes = (" ", tag)
for line in delta:
......
......@@ -47,7 +47,7 @@ def distb(tb=None):
try:
tb = sys.last_traceback
except AttributeError:
raise RuntimeError, "no last traceback to disassemble"
raise RuntimeError("no last traceback to disassemble")
while tb.tb_next: tb = tb.tb_next
disassemble(tb.tb_frame.f_code, tb.tb_lasti)
......
......@@ -326,9 +326,9 @@ class _OutputRedirectingPdb(pdb.Pdb):
# [XX] Normalize with respect to os.path.pardir?
def _module_relative_path(module, path):
if not inspect.ismodule(module):
raise TypeError, 'Expected a module: %r' % module
raise TypeError('Expected a module: %r' % module)
if path.startswith('/'):
raise ValueError, 'Module-relative files may not have absolute paths'
raise ValueError('Module-relative files may not have absolute paths')
# Find the base directory for the path.
if hasattr(module, '__file__'):
......
......@@ -116,18 +116,16 @@ def search_function(encoding):
entry = getregentry()
if not isinstance(entry, codecs.CodecInfo):
if not 4 <= len(entry) <= 7:
raise CodecRegistryError,\
'module "%s" (%s) failed to register' % \
(mod.__name__, mod.__file__)
raise CodecRegistryError('module "%s" (%s) failed to register'
% (mod.__name__, mod.__file__))
if not hasattr(entry[0], '__call__') or \
not hasattr(entry[1], '__call__') or \
(entry[2] is not None and not hasattr(entry[2], '__call__')) or \
(entry[3] is not None and not hasattr(entry[3], '__call__')) or \
(len(entry) > 4 and entry[4] is not None and not hasattr(entry[4], '__call__')) or \
(len(entry) > 5 and entry[5] is not None and not hasattr(entry[5], '__call__')):
raise CodecRegistryError,\
'incompatible codecs in module "%s" (%s)' % \
(mod.__name__, mod.__file__)
raise CodecRegistryError('incompatible codecs in module "%s" (%s)'
% (mod.__name__, mod.__file__))
if len(entry)<7 or entry[6] is None:
entry += (None,)*(6-len(entry)) + (mod.__name__.split(".", 1)[1],)
entry = codecs.CodecInfo(*entry)
......
......@@ -135,7 +135,7 @@ def decode_generalized_number(extended, extpos, bias, errors):
char = ord(extended[extpos])
except IndexError:
if errors == "strict":
raise UnicodeError, "incomplete punicode string"
raise UnicodeError("incomplete punicode string")
return extpos + 1, None
extpos += 1
if 0x41 <= char <= 0x5A: # A-Z
......@@ -172,7 +172,7 @@ def insertion_sort(base, extended, errors):
char += pos // (len(base) + 1)
if char > 0x10FFFF:
if errors == "strict":
raise UnicodeError, ("Invalid character U+%x" % char)
raise UnicodeError("Invalid character U+%x" % char)
char = ord('?')
pos = pos % (len(base) + 1)
base = base[:pos] + chr(char) + base[pos:]
......@@ -202,7 +202,7 @@ class Codec(codecs.Codec):
def decode(self, input, errors='strict'):
if errors not in ('strict', 'replace', 'ignore'):
raise UnicodeError, "Unsupported error handling "+errors
raise UnicodeError("Unsupported error handling "+errors)
res = punycode_decode(input, errors)
return res, len(input)
......@@ -213,7 +213,7 @@ class IncrementalEncoder(codecs.IncrementalEncoder):
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
if self.errors not in ('strict', 'replace', 'ignore'):
raise UnicodeError, "Unsupported error handling "+self.errors
raise UnicodeError("Unsupported error handling "+self.errors)
return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
......