Commit 262a47d2 authored by Ezio Melotti

Merged revisions 75407,75409-75413,75415,75419-75421 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/trunk

........
  r75407 | antoine.pitrou | 2009-10-14 20:30:52 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix py3k warnings in the aifc module
........
  r75409 | antoine.pitrou | 2009-10-14 21:01:33 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix py3k warnings in bsddb
........
  r75410 | antoine.pitrou | 2009-10-14 21:09:45 +0300 (Wed, 14 Oct 2009) | 3 lines

  Silence a py3k warning claiming to affect Lib/calendar.py
........
  r75411 | antoine.pitrou | 2009-10-14 21:12:54 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix a py3k warning in the StringIO module (exhibited in test_codecencodings_cn)
........
  r75412 | antoine.pitrou | 2009-10-14 21:27:32 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix py3k warnings in the socket module
........
  r75413 | antoine.pitrou | 2009-10-14 21:31:05 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix a py3k warning in the sndhdr module (found with test_email)
........
  r75415 | antoine.pitrou | 2009-10-14 21:39:46 +0300 (Wed, 14 Oct 2009) | 3 lines

  Silence some py3k warnings claiming to affect _pyio
........
  r75419 | antoine.pitrou | 2009-10-14 21:56:11 +0300 (Wed, 14 Oct 2009) | 3 lines

  Silence py3k warning claiming to affect the random module
........
  r75420 | antoine.pitrou | 2009-10-14 22:04:48 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix py3k warnings in httplib
........
  r75421 | antoine.pitrou | 2009-10-14 22:09:48 +0300 (Wed, 14 Oct 2009) | 3 lines

  Fix py3k warnings in the uuid module
........
parent efcdd849
......@@ -128,7 +128,7 @@ class StringIO:
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if n < 0:
if n is None or n < 0:
newpos = self.len
else:
newpos = min(self.pos+n, self.len)
......
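For context, a minimal sketch (not part of the patch) of why the extra None check in StringIO.read() matters: under "python2 -3", an ordering comparison between None and an integer emits a DeprecationWarning, so the size argument is tested for None before the numeric comparison. Behaviour for ordinary integer sizes is unchanged. The helper name below is invented for illustration.

    # Hypothetical helper mirroring the guarded size handling in read().
    def _resolve_read_size(pos, total, n=-1):
        if n is None or n < 0:   # "None < 0" would warn under the -3 flag
            return total
        return min(pos + n, total)

    assert _resolve_read_size(0, 10) == 10        # default: read everything
    assert _resolve_read_size(0, 10, None) == 10  # read(None) also means "all"
    assert _resolve_read_size(4, 10, 3) == 7      # bounded read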
......@@ -409,7 +409,7 @@ class Aifc_read:
data = self._ssnd_chunk.read(nframes * self._framesize)
if self._convert and data:
data = self._convert(data)
self._soundpos = self._soundpos + len(data) / (self._nchannels * self._sampwidth)
self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
return data
#
......@@ -420,7 +420,7 @@ class Aifc_read:
import cl
dummy = self._decomp.SetParam(cl.FRAME_BUFFER_SIZE,
len(data) * 2)
return self._decomp.Decompress(len(data) / self._nchannels,
return self._decomp.Decompress(len(data) // self._nchannels,
data)
def _ulaw2lin(self, data):
......@@ -439,7 +439,7 @@ class Aifc_read:
def _read_comm_chunk(self, chunk):
self._nchannels = _read_short(chunk)
self._nframes = _read_long(chunk)
self._sampwidth = (_read_short(chunk) + 7) / 8
self._sampwidth = (_read_short(chunk) + 7) // 8
self._framerate = int(_read_float(chunk))
self._framesize = self._nchannels * self._sampwidth
if self._aifc:
......@@ -468,7 +468,7 @@ class Aifc_read:
pass
else:
self._convert = self._adpcm2lin
self._framesize = self._framesize / 4
self._framesize = self._framesize // 4
return
# for ULAW and ALAW try Compression Library
try:
......@@ -478,17 +478,17 @@ class Aifc_read:
try:
import audioop
self._convert = self._ulaw2lin
self._framesize = self._framesize / 2
self._framesize = self._framesize // 2
return
except ImportError:
pass
raise Error, 'cannot read compressed AIFF-C files'
if self._comptype == 'ULAW':
scheme = cl.G711_ULAW
self._framesize = self._framesize / 2
self._framesize = self._framesize // 2
elif self._comptype == 'ALAW':
scheme = cl.G711_ALAW
self._framesize = self._framesize / 2
self._framesize = self._framesize // 2
else:
raise Error, 'unsupported compression type'
self._decomp = cl.OpenDecompressor(scheme)
......@@ -706,7 +706,7 @@ class Aifc_write:
def writeframesraw(self, data):
self._ensure_header_written(len(data))
nframes = len(data) / (self._sampwidth * self._nchannels)
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
self._file.write(data)
......@@ -820,17 +820,17 @@ class Aifc_write:
self._init_compression()
self._file.write('FORM')
if not self._nframes:
self._nframes = initlength / (self._nchannels * self._sampwidth)
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
if self._datalength & 1:
self._datalength = self._datalength + 1
if self._aifc:
if self._comptype in ('ULAW', 'ALAW'):
self._datalength = self._datalength / 2
self._datalength = self._datalength // 2
if self._datalength & 1:
self._datalength = self._datalength + 1
elif self._comptype == 'G722':
self._datalength = (self._datalength + 3) / 4
self._datalength = (self._datalength + 3) // 4
if self._datalength & 1:
self._datalength = self._datalength + 1
self._form_length_pos = self._file.tell()
......
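A short illustration (assumed values, not taken from aifc itself) of the "/" to "//" substitutions above: with the -3 flag, classic integer division emits a DeprecationWarning, and under true division the frame arithmetic would silently become floating point. Floor division keeps the frame counts integral on both 2.x and 3.x.

    # Illustrative values only; aifc derives these from the COMM chunk.
    nchannels, sampwidth = 2, 2
    framesize = nchannels * sampwidth
    data = '\x00' * 4096
    nframes = len(data) // framesize   # 1024, an int, no py3k warning
    assert nframes == 1024 and isinstance(nframes, int)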
......@@ -42,91 +42,91 @@ else :
class DBEnv:
def __init__(self, *args, **kwargs):
self._cobj = apply(db.DBEnv, args, kwargs)
self._cobj = db.DBEnv(*args, **kwargs)
def close(self, *args, **kwargs):
return apply(self._cobj.close, args, kwargs)
return self._cobj.close(*args, **kwargs)
def open(self, *args, **kwargs):
return apply(self._cobj.open, args, kwargs)
return self._cobj.open(*args, **kwargs)
def remove(self, *args, **kwargs):
return apply(self._cobj.remove, args, kwargs)
return self._cobj.remove(*args, **kwargs)
def set_shm_key(self, *args, **kwargs):
return apply(self._cobj.set_shm_key, args, kwargs)
return self._cobj.set_shm_key(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return apply(self._cobj.set_cachesize, args, kwargs)
return self._cobj.set_cachesize(*args, **kwargs)
def set_data_dir(self, *args, **kwargs):
return apply(self._cobj.set_data_dir, args, kwargs)
return self._cobj.set_data_dir(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return apply(self._cobj.set_flags, args, kwargs)
return self._cobj.set_flags(*args, **kwargs)
def set_lg_bsize(self, *args, **kwargs):
return apply(self._cobj.set_lg_bsize, args, kwargs)
return self._cobj.set_lg_bsize(*args, **kwargs)
def set_lg_dir(self, *args, **kwargs):
return apply(self._cobj.set_lg_dir, args, kwargs)
return self._cobj.set_lg_dir(*args, **kwargs)
def set_lg_max(self, *args, **kwargs):
return apply(self._cobj.set_lg_max, args, kwargs)
return self._cobj.set_lg_max(*args, **kwargs)
def set_lk_detect(self, *args, **kwargs):
return apply(self._cobj.set_lk_detect, args, kwargs)
return self._cobj.set_lk_detect(*args, **kwargs)
if db.version() < (4,5):
def set_lk_max(self, *args, **kwargs):
return apply(self._cobj.set_lk_max, args, kwargs)
return self._cobj.set_lk_max(*args, **kwargs)
def set_lk_max_locks(self, *args, **kwargs):
return apply(self._cobj.set_lk_max_locks, args, kwargs)
return self._cobj.set_lk_max_locks(*args, **kwargs)
def set_lk_max_lockers(self, *args, **kwargs):
return apply(self._cobj.set_lk_max_lockers, args, kwargs)
return self._cobj.set_lk_max_lockers(*args, **kwargs)
def set_lk_max_objects(self, *args, **kwargs):
return apply(self._cobj.set_lk_max_objects, args, kwargs)
return self._cobj.set_lk_max_objects(*args, **kwargs)
def set_mp_mmapsize(self, *args, **kwargs):
return apply(self._cobj.set_mp_mmapsize, args, kwargs)
return self._cobj.set_mp_mmapsize(*args, **kwargs)
def set_timeout(self, *args, **kwargs):
return apply(self._cobj.set_timeout, args, kwargs)
return self._cobj.set_timeout(*args, **kwargs)
def set_tmp_dir(self, *args, **kwargs):
return apply(self._cobj.set_tmp_dir, args, kwargs)
return self._cobj.set_tmp_dir(*args, **kwargs)
def txn_begin(self, *args, **kwargs):
return apply(self._cobj.txn_begin, args, kwargs)
return self._cobj.txn_begin(*args, **kwargs)
def txn_checkpoint(self, *args, **kwargs):
return apply(self._cobj.txn_checkpoint, args, kwargs)
return self._cobj.txn_checkpoint(*args, **kwargs)
def txn_stat(self, *args, **kwargs):
return apply(self._cobj.txn_stat, args, kwargs)
return self._cobj.txn_stat(*args, **kwargs)
def set_tx_max(self, *args, **kwargs):
return apply(self._cobj.set_tx_max, args, kwargs)
return self._cobj.set_tx_max(*args, **kwargs)
def set_tx_timestamp(self, *args, **kwargs):
return apply(self._cobj.set_tx_timestamp, args, kwargs)
return self._cobj.set_tx_timestamp(*args, **kwargs)
def lock_detect(self, *args, **kwargs):
return apply(self._cobj.lock_detect, args, kwargs)
return self._cobj.lock_detect(*args, **kwargs)
def lock_get(self, *args, **kwargs):
return apply(self._cobj.lock_get, args, kwargs)
return self._cobj.lock_get(*args, **kwargs)
def lock_id(self, *args, **kwargs):
return apply(self._cobj.lock_id, args, kwargs)
return self._cobj.lock_id(*args, **kwargs)
def lock_put(self, *args, **kwargs):
return apply(self._cobj.lock_put, args, kwargs)
return self._cobj.lock_put(*args, **kwargs)
def lock_stat(self, *args, **kwargs):
return apply(self._cobj.lock_stat, args, kwargs)
return self._cobj.lock_stat(*args, **kwargs)
def log_archive(self, *args, **kwargs):
return apply(self._cobj.log_archive, args, kwargs)
return self._cobj.log_archive(*args, **kwargs)
def set_get_returns_none(self, *args, **kwargs):
return apply(self._cobj.set_get_returns_none, args, kwargs)
return self._cobj.set_get_returns_none(*args, **kwargs)
def log_stat(self, *args, **kwargs):
return apply(self._cobj.log_stat, args, kwargs)
return self._cobj.log_stat(*args, **kwargs)
if db.version() >= (4,1):
def dbremove(self, *args, **kwargs):
return apply(self._cobj.dbremove, args, kwargs)
return self._cobj.dbremove(*args, **kwargs)
def dbrename(self, *args, **kwargs):
return apply(self._cobj.dbrename, args, kwargs)
return self._cobj.dbrename(*args, **kwargs)
def set_encrypt(self, *args, **kwargs):
return apply(self._cobj.set_encrypt, args, kwargs)
return self._cobj.set_encrypt(*args, **kwargs)
if db.version() >= (4,4):
def lsn_reset(self, *args, **kwargs):
return apply(self._cobj.lsn_reset, args, kwargs)
return self._cobj.lsn_reset(*args, **kwargs)
class DB(MutableMapping):
def __init__(self, dbenv, *args, **kwargs):
# give it the proper DBEnv C object that its expecting
self._cobj = apply(db.DB, (dbenv._cobj,) + args, kwargs)
self._cobj = db.DB(*((dbenv._cobj,) + args), **kwargs)
# TODO are there other dict methods that need to be overridden?
def __len__(self):
......@@ -143,126 +143,126 @@ class DB(MutableMapping):
return self._cobj.__iter__()
def append(self, *args, **kwargs):
return apply(self._cobj.append, args, kwargs)
return self._cobj.append(*args, **kwargs)
def associate(self, *args, **kwargs):
return apply(self._cobj.associate, args, kwargs)
return self._cobj.associate(*args, **kwargs)
def close(self, *args, **kwargs):
return apply(self._cobj.close, args, kwargs)
return self._cobj.close(*args, **kwargs)
def consume(self, *args, **kwargs):
return apply(self._cobj.consume, args, kwargs)
return self._cobj.consume(*args, **kwargs)
def consume_wait(self, *args, **kwargs):
return apply(self._cobj.consume_wait, args, kwargs)
return self._cobj.consume_wait(*args, **kwargs)
def cursor(self, *args, **kwargs):
return apply(self._cobj.cursor, args, kwargs)
return self._cobj.cursor(*args, **kwargs)
def delete(self, *args, **kwargs):
return apply(self._cobj.delete, args, kwargs)
return self._cobj.delete(*args, **kwargs)
def fd(self, *args, **kwargs):
return apply(self._cobj.fd, args, kwargs)
return self._cobj.fd(*args, **kwargs)
def get(self, *args, **kwargs):
return apply(self._cobj.get, args, kwargs)
return self._cobj.get(*args, **kwargs)
def pget(self, *args, **kwargs):
return apply(self._cobj.pget, args, kwargs)
return self._cobj.pget(*args, **kwargs)
def get_both(self, *args, **kwargs):
return apply(self._cobj.get_both, args, kwargs)
return self._cobj.get_both(*args, **kwargs)
def get_byteswapped(self, *args, **kwargs):
return apply(self._cobj.get_byteswapped, args, kwargs)
return self._cobj.get_byteswapped(*args, **kwargs)
def get_size(self, *args, **kwargs):
return apply(self._cobj.get_size, args, kwargs)
return self._cobj.get_size(*args, **kwargs)
def get_type(self, *args, **kwargs):
return apply(self._cobj.get_type, args, kwargs)
return self._cobj.get_type(*args, **kwargs)
def join(self, *args, **kwargs):
return apply(self._cobj.join, args, kwargs)
return self._cobj.join(*args, **kwargs)
def key_range(self, *args, **kwargs):
return apply(self._cobj.key_range, args, kwargs)
return self._cobj.key_range(*args, **kwargs)
def has_key(self, *args, **kwargs):
return apply(self._cobj.has_key, args, kwargs)
return self._cobj.has_key(*args, **kwargs)
def items(self, *args, **kwargs):
return apply(self._cobj.items, args, kwargs)
return self._cobj.items(*args, **kwargs)
def keys(self, *args, **kwargs):
return apply(self._cobj.keys, args, kwargs)
return self._cobj.keys(*args, **kwargs)
def open(self, *args, **kwargs):
return apply(self._cobj.open, args, kwargs)
return self._cobj.open(*args, **kwargs)
def put(self, *args, **kwargs):
return apply(self._cobj.put, args, kwargs)
return self._cobj.put(*args, **kwargs)
def remove(self, *args, **kwargs):
return apply(self._cobj.remove, args, kwargs)
return self._cobj.remove(*args, **kwargs)
def rename(self, *args, **kwargs):
return apply(self._cobj.rename, args, kwargs)
return self._cobj.rename(*args, **kwargs)
def set_bt_minkey(self, *args, **kwargs):
return apply(self._cobj.set_bt_minkey, args, kwargs)
return self._cobj.set_bt_minkey(*args, **kwargs)
def set_bt_compare(self, *args, **kwargs):
return apply(self._cobj.set_bt_compare, args, kwargs)
return self._cobj.set_bt_compare(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return apply(self._cobj.set_cachesize, args, kwargs)
return self._cobj.set_cachesize(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return apply(self._cobj.set_flags, args, kwargs)
return self._cobj.set_flags(*args, **kwargs)
def set_h_ffactor(self, *args, **kwargs):
return apply(self._cobj.set_h_ffactor, args, kwargs)
return self._cobj.set_h_ffactor(*args, **kwargs)
def set_h_nelem(self, *args, **kwargs):
return apply(self._cobj.set_h_nelem, args, kwargs)
return self._cobj.set_h_nelem(*args, **kwargs)
def set_lorder(self, *args, **kwargs):
return apply(self._cobj.set_lorder, args, kwargs)
return self._cobj.set_lorder(*args, **kwargs)
def set_pagesize(self, *args, **kwargs):
return apply(self._cobj.set_pagesize, args, kwargs)
return self._cobj.set_pagesize(*args, **kwargs)
def set_re_delim(self, *args, **kwargs):
return apply(self._cobj.set_re_delim, args, kwargs)
return self._cobj.set_re_delim(*args, **kwargs)
def set_re_len(self, *args, **kwargs):
return apply(self._cobj.set_re_len, args, kwargs)
return self._cobj.set_re_len(*args, **kwargs)
def set_re_pad(self, *args, **kwargs):
return apply(self._cobj.set_re_pad, args, kwargs)
return self._cobj.set_re_pad(*args, **kwargs)
def set_re_source(self, *args, **kwargs):
return apply(self._cobj.set_re_source, args, kwargs)
return self._cobj.set_re_source(*args, **kwargs)
def set_q_extentsize(self, *args, **kwargs):
return apply(self._cobj.set_q_extentsize, args, kwargs)
return self._cobj.set_q_extentsize(*args, **kwargs)
def stat(self, *args, **kwargs):
return apply(self._cobj.stat, args, kwargs)
return self._cobj.stat(*args, **kwargs)
def sync(self, *args, **kwargs):
return apply(self._cobj.sync, args, kwargs)
return self._cobj.sync(*args, **kwargs)
def type(self, *args, **kwargs):
return apply(self._cobj.type, args, kwargs)
return self._cobj.type(*args, **kwargs)
def upgrade(self, *args, **kwargs):
return apply(self._cobj.upgrade, args, kwargs)
return self._cobj.upgrade(*args, **kwargs)
def values(self, *args, **kwargs):
return apply(self._cobj.values, args, kwargs)
return self._cobj.values(*args, **kwargs)
def verify(self, *args, **kwargs):
return apply(self._cobj.verify, args, kwargs)
return self._cobj.verify(*args, **kwargs)
def set_get_returns_none(self, *args, **kwargs):
return apply(self._cobj.set_get_returns_none, args, kwargs)
return self._cobj.set_get_returns_none(*args, **kwargs)
if db.version() >= (4,1):
def set_encrypt(self, *args, **kwargs):
return apply(self._cobj.set_encrypt, args, kwargs)
return self._cobj.set_encrypt(*args, **kwargs)
class DBSequence:
def __init__(self, *args, **kwargs):
self._cobj = apply(db.DBSequence, args, kwargs)
self._cobj = db.DBSequence(*args, **kwargs)
def close(self, *args, **kwargs):
return apply(self._cobj.close, args, kwargs)
return self._cobj.close(*args, **kwargs)
def get(self, *args, **kwargs):
return apply(self._cobj.get, args, kwargs)
return self._cobj.get(*args, **kwargs)
def get_dbp(self, *args, **kwargs):
return apply(self._cobj.get_dbp, args, kwargs)
return self._cobj.get_dbp(*args, **kwargs)
def get_key(self, *args, **kwargs):
return apply(self._cobj.get_key, args, kwargs)
return self._cobj.get_key(*args, **kwargs)
def init_value(self, *args, **kwargs):
return apply(self._cobj.init_value, args, kwargs)
return self._cobj.init_value(*args, **kwargs)
def open(self, *args, **kwargs):
return apply(self._cobj.open, args, kwargs)
return self._cobj.open(*args, **kwargs)
def remove(self, *args, **kwargs):
return apply(self._cobj.remove, args, kwargs)
return self._cobj.remove(*args, **kwargs)
def stat(self, *args, **kwargs):
return apply(self._cobj.stat, args, kwargs)
return self._cobj.stat(*args, **kwargs)
def set_cachesize(self, *args, **kwargs):
return apply(self._cobj.set_cachesize, args, kwargs)
return self._cobj.set_cachesize(*args, **kwargs)
def set_flags(self, *args, **kwargs):
return apply(self._cobj.set_flags, args, kwargs)
return self._cobj.set_flags(*args, **kwargs)
def set_range(self, *args, **kwargs):
return apply(self._cobj.set_range, args, kwargs)
return self._cobj.set_range(*args, **kwargs)
def get_cachesize(self, *args, **kwargs):
return apply(self._cobj.get_cachesize, args, kwargs)
return self._cobj.get_cachesize(*args, **kwargs)
def get_flags(self, *args, **kwargs):
return apply(self._cobj.get_flags, args, kwargs)
return self._cobj.get_flags(*args, **kwargs)
def get_range(self, *args, **kwargs):
return apply(self._cobj.get_range, args, kwargs)
return self._cobj.get_range(*args, **kwargs)
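The bsddb wrapper classes above all make the same substitution: the apply() built-in is removed in Python 3 and warns under -3, while the extended call syntax f(*args, **kwargs) behaves identically on both lines. A generic sketch of the forwarding pattern, with a made-up class name:

    class _Forwarder(object):
        """Thin wrapper that delegates calls to a wrapped object."""

        def __init__(self, target):
            self._cobj = target

        def close(self, *args, **kwargs):
            # old: return apply(self._cobj.close, args, kwargs)
            return self._cobj.close(*args, **kwargs)

    import StringIO
    w = _Forwarder(StringIO.StringIO('spam'))
    w.close()   # forwarded to the wrapped object's close()

The DB constructor keeps its extra leading positional argument by building the tuple (dbenv._cobj,) + args before unpacking it, which is the direct equivalent of the old apply(db.DB, (dbenv._cobj,) + args, kwargs).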
......@@ -225,7 +225,7 @@ class DBShelf(DictMixin):
# given nothing is passed to the extension module. That way
# an exception can be raised if set_get_returns_none is turned
# off.
data = apply(self.db.get, args, kw)
data = self.db.get(*args, **kw)
try:
return cPickle.loads(data)
except (EOFError, TypeError, cPickle.UnpicklingError):
......@@ -294,7 +294,7 @@ class DBShelfCursor:
def get(self, *args):
count = len(args) # a method overloading hack
method = getattr(self, 'get_%d' % count)
apply(method, args)
method(*args)
def get_1(self, flags):
rec = self.dbc.get(flags)
......
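The DBShelfCursor.get() above dispatches on the number of positional arguments (get_1, get_2, ...); only the call style changes, from apply(method, args) to method(*args). A standalone sketch of that arity-dispatch idiom, with invented method names and return values:

    class _ArityDispatch(object):
        def get(self, *args):
            handler = getattr(self, 'get_%d' % len(args))
            return handler(*args)        # was: apply(handler, args)

        def get_1(self, flags):
            return ('flags-only', flags)

        def get_2(self, key, flags):
            return ('key', key, flags)

    assert _ArityDispatch().get('spam', 0) == ('key', 'spam', 0)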
......@@ -398,7 +398,7 @@ class bsdTableDB :
# column names
newcolumnlist = copy.copy(oldcolumnlist)
for c in columns:
if not oldcolumnhash.has_key(c):
if c not in oldcolumnhash:
newcolumnlist.append(c)
# store the table's new extended column list
......@@ -472,7 +472,7 @@ class bsdTableDB :
raise TableDBError, "unknown table"
# check the validity of each column name
if not self.__tablecolumns.has_key(table):
if not table in self.__tablecolumns:
self.__load_column_info(table)
for column in rowdict.keys() :
if not self.__tablecolumns[table].count(column):
......@@ -615,7 +615,7 @@ class bsdTableDB :
argument and returning a boolean.
"""
try:
if not self.__tablecolumns.has_key(table):
if table not in self.__tablecolumns:
self.__load_column_info(table)
if columns is None:
columns = self.__tablecolumns[table]
......@@ -639,7 +639,7 @@ class bsdTableDB :
argument and returning a boolean.
"""
# check the validity of each column name
if not self.__tablecolumns.has_key(table):
if not table in self.__tablecolumns:
self.__load_column_info(table)
if columns is None:
columns = self.tablecolumns[table]
......@@ -709,28 +709,24 @@ class bsdTableDB :
# extract the rowid from the key
rowid = key[-_rowid_str_len:]
if not rejected_rowids.has_key(rowid):
if not rowid in rejected_rowids:
# if no condition was specified or the condition
# succeeds, add row to our match list.
if not condition or condition(data):
if not matching_rowids.has_key(rowid):
if not rowid in matching_rowids:
matching_rowids[rowid] = {}
if savethiscolumndata:
matching_rowids[rowid][column] = data
else:
if matching_rowids.has_key(rowid):
if rowid in matching_rowids:
del matching_rowids[rowid]
rejected_rowids[rowid] = rowid
key, data = cur.next()
except db.DBError, dberror:
if sys.version_info[0] < 3 :
if dberror[0] != db.DB_NOTFOUND:
raise
else :
if dberror.args[0] != db.DB_NOTFOUND:
raise
if dberror.args[0] != db.DB_NOTFOUND:
raise
continue
cur.close()
......@@ -743,7 +739,7 @@ class bsdTableDB :
if len(columns) > 0:
for rowid, rowdata in matching_rowids.items():
for column in columns:
if rowdata.has_key(column):
if column in rowdata:
continue
try:
rowdata[column] = self.db.get(
......@@ -815,13 +811,10 @@ class bsdTableDB :
txn.commit()
txn = None
if self.__tablecolumns.has_key(table):
if table in self.__tablecolumns:
del self.__tablecolumns[table]
except db.DBError, dberror:
if txn:
txn.abort()
if sys.version_info[0] < 3 :
raise TableDBError, dberror[1]
else :
raise TableDBError, dberror.args[1]
raise TableDBError(dberror.args[1])
......@@ -61,7 +61,7 @@ def DeadlockWrap(function, *_args, **_kwargs):
"""
sleeptime = _deadlock_MinSleepTime
max_retries = _kwargs.get('max_retries', -1)
if _kwargs.has_key('max_retries'):
if 'max_retries' in _kwargs:
del _kwargs['max_retries']
while True:
try:
......
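The dbtables and dbutils hunks drop dict.has_key(), which no longer exists in Python 3 and warns under -3, in favour of the "in" / "not in" operators; the last dbtables hunks also collapse the version-dependent branches that indexed the exception directly into the .args form and switch the raise statement to call syntax, raise TableDBError(...), which 2.x and 3.x both accept. A small sketch of the membership-test substitution, using throwaway names rather than the DeadlockWrap code itself:

    kwargs = {'max_retries': 3, 'flags': 0}
    if 'max_retries' in kwargs:          # was: kwargs.has_key('max_retries')
        retries = kwargs.pop('max_retries')
    if 'timeout' not in kwargs:          # was: not kwargs.has_key('timeout')
        kwargs['timeout'] = None
    assert retries == 3 and kwargs == {'flags': 0, 'timeout': None}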
......@@ -707,8 +707,8 @@ class HTTPConnection:
if code != 200:
self.close()
raise socket.error, "Tunnel connection failed: %d %s" % (code,
message.strip())
raise socket.error("Tunnel connection failed: %d %s" % (code,
message.strip()))
while True:
line = response.fp.readline()
if line == '\r\n': break
......@@ -758,7 +758,7 @@ class HTTPConnection:
else:
self.sock.sendall(str)
except socket.error, v:
if v[0] == 32: # Broken pipe
if v.args[0] == 32: # Broken pipe
self.close()
raise
......@@ -914,7 +914,7 @@ class HTTPConnection:
self._send_request(method, url, body, headers)
except socket.error, v:
# trap 'Broken pipe' if we're allowed to automatically reconnect
if v[0] != 32 or not self.auto_open:
if v.args[0] != 32 or not self.auto_open:
raise
# try one more time
self._send_request(method, url, body, headers)
......
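httplib stops indexing the exception object itself: a 2.x exception could be subscripted like a tuple, but that protocol is gone in 3.x and triggers a py3k warning, so the code reads exc.args[0] instead. Roughly, for the "Broken pipe" (errno 32) case the code special-cases — a sketch, not the httplib code:

    import errno
    import socket

    try:
        raise socket.error(errno.EPIPE, 'Broken pipe')
    except socket.error, v:
        # old style: v[0] == 32 -- indexing the exception warns under -3
        if v.args[0] == errno.EPIPE:
            pass  # close / reconnect, as httplib does
        else:
            raise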
......@@ -100,7 +100,7 @@ def test_au(h, f):
else:
sample_bits = '?'
frame_size = sample_size * nchannels
return type, rate, nchannels, data_size/frame_size, sample_bits
return type, rate, nchannels, data_size//frame_size, sample_bits
tests.append(test_au)
......@@ -109,7 +109,7 @@ def test_hcom(h, f):
if h[65:69] != 'FSSD' or h[128:132] != 'HCOM':
return None
divisor = get_long_be(h[128+16:128+20])
return 'hcom', 22050/divisor, 1, -1, 8
return 'hcom', 22050//divisor, 1, -1, 8
tests.append(test_hcom)
......
......@@ -290,12 +290,16 @@ class _fileobject(object):
write_offset = 0
try:
while write_offset < data_size:
self._sock.sendall(buffer(data, write_offset, buffer_size))
with warnings.catch_warnings():
if sys.py3kwarning:
warnings.filterwarnings("ignore", ".*buffer",
DeprecationWarning)
self._sock.sendall(buffer(data, write_offset, buffer_size))
write_offset += buffer_size
finally:
if write_offset < data_size:
remainder = data[write_offset:]
del data # explicit free
del view, data # explicit free
self._wbuf.append(remainder)
self._wbuf_len = len(remainder)
......@@ -343,7 +347,7 @@ class _fileobject(object):
try:
data = self._sock.recv(rbufsize)
except error, e:
if e[0] == EINTR:
if e.args[0] == EINTR:
continue
raise
if not data:
......@@ -372,7 +376,7 @@ class _fileobject(object):
try:
data = self._sock.recv(left)
except error, e:
if e[0] == EINTR:
if e.args[0] == EINTR:
continue
raise
if not data:
......@@ -427,7 +431,7 @@ class _fileobject(object):
except error, e:
# The try..except to catch EINTR was moved outside the
# recv loop to avoid the per byte overhead.
if e[0] == EINTR:
if e.args[0] == EINTR:
continue
raise
break
......@@ -439,7 +443,7 @@ class _fileobject(object):
try:
data = self._sock.recv(self._rbufsize)
except error, e:
if e[0] == EINTR:
if e.args[0] == EINTR:
continue
raise
if not data:
......@@ -468,7 +472,7 @@ class _fileobject(object):
try:
data = self._sock.recv(self._rbufsize)
except error, e:
if e[0] == EINTR:
if e.args[0] == EINTR:
continue
raise
if not data:
......
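The socket write path above keeps using buffer() and instead silences the specific py3k DeprecationWarning around that one call; the EINTR checks switch to e.args[0] for the same reason as in httplib. The general shape of the warning-suppression idiom, sketched outside of socket.py with illustrative data:

    import sys
    import warnings

    data, offset, size = 'payload', 0, 4
    with warnings.catch_warnings():
        if getattr(sys, 'py3kwarning', False):
            warnings.filterwarnings('ignore', '.*buffer', DeprecationWarning)
        chunk = buffer(data, offset, size)   # would warn under "python2 -3"
    assert str(chunk) == 'payl'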
......@@ -502,8 +502,8 @@ def uuid1(node=None, clock_seq=None):
nanoseconds = int(time.time() * 1e9)
# 0x01b21dd213814000 is the number of 100-ns intervals between the
# UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
if timestamp <= _last_timestamp:
timestamp = int(nanoseconds//100) + 0x01b21dd213814000L
if _last_timestamp is not None and timestamp <= _last_timestamp:
timestamp = _last_timestamp + 1
_last_timestamp = timestamp
if clock_seq is None:
......
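uuid1() gets both treatments at once: floor division for the 100-ns timestamp and an explicit None guard, since the module-level _last_timestamp starts out as None and comparing an int against None would warn under -3 (and is a TypeError in 3.x). Sketched with a local variable standing in for the real global:

    _last = None                      # stands in for uuid._last_timestamp
    for nanoseconds in (1000, 1000, 2000):
        timestamp = int(nanoseconds // 100) + 0x01b21dd213814000L
        if _last is not None and timestamp <= _last:
            timestamp = _last + 1     # keep timestamps strictly increasing
        _last = timestamp
    assert _last == 0x01b21dd213814000L + 20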