Commit 9cd33ca4 authored by Julien Muchembled

Simplify code by using some new Python syntax

- PEP 308: Conditional Expressions
- PEP 341: Unified try/except/finally
- PEP 343: The ‘with’ statement
parent 48212534
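The three features listed in the commit message, shown as a minimal standalone Python sketch; the function and file names below are illustrative only and are not taken from the patch:

import logging

def process(flag, path):
    # PEP 308: a conditional expression replaces the old "cond and a or b" idiom.
    mode = 'merge' if flag else 'rebase'

    # PEP 343: the with statement closes the file even if the body raises,
    # replacing an explicit try/finally around close().
    with open(path) as f:
        text = f.read()

    # PEP 341: a single try statement may carry both except and finally clauses,
    # so the old "try: try: ... except: ... finally: ..." nesting is unneeded.
    try:
        return mode, len(text)
    except Exception:
        logging.exception("processing failed")
        raise
    finally:
        logging.info("done with %s", path)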
@@ -79,16 +79,15 @@ class ActivityBuffer(TM):
   def _finish(self):
     # LOG('ActivityBuffer', 0, '_finish %r' % (self,))
     try:
-      try:
-        # Try to push / delete all messages
-        for activity, message in self.flushed_activity:
-          activity.finishDeleteMessage(self._activity_tool_path, message)
-        for activity, message in self.queued_activity:
-          activity.finishQueueMessage(self._activity_tool_path, message)
-      except:
-        LOG('ActivityBuffer', ERROR, "exception during _finish",
-            error=sys.exc_info())
-        raise
+      # Try to push / delete all messages
+      for activity, message in self.flushed_activity:
+        activity.finishDeleteMessage(self._activity_tool_path, message)
+      for activity, message in self.queued_activity:
+        activity.finishQueueMessage(self._activity_tool_path, message)
+    except:
+      LOG('ActivityBuffer', ERROR, "exception during _finish",
+          error=sys.exc_info())
+      raise
     finally:
       self._clear()
...
@@ -877,16 +877,14 @@ class ActivityTool (Folder, UniqueObject):
     LOG('CMFActivity', INFO, "Shutdown: Activities finished.")
 
   def process_timer(self, tick, interval, prev="", next=""):
     """
     Call distribute() if we are the Distributing Node and call tic()
     with our node number.
     This method is called by TimerService in the interval given
     in zope.conf. The Default is every 5 seconds.
     """
     # Prevent TimerService from starting multiple threads in parallel
-    acquired = timerservice_lock.acquire(0)
-    if not acquired:
-      return
-    try:
+    if timerservice_lock.acquire(0):
+      try:
         # make sure our skin is set-up. On CMF 1.5 it's setup by acquisition,
         # but on 2.2 it's by traversal, and our site probably wasn't traversed
@@ -896,37 +894,37 @@ class ActivityTool (Folder, UniqueObject):
         self.setupCurrentSkin(self.REQUEST)
         old_sm = getSecurityManager()
         try:
-          try:
-            # get owner of portal_catalog, so normally we should be able to
-            # have the permission to invoke all activities
-            user = self.portal_catalog.getWrappedOwner()
-            newSecurityManager(self.REQUEST, user)
-            currentNode = self.getCurrentNode()
-            self.registerNode(currentNode)
-            processing_node_list = self.getNodeList(role=ROLE_PROCESSING)
-            # only distribute when we are the distributingNode
-            if (self.getDistributingNode() == currentNode):
-              self.distribute(len(processing_node_list))
-            # SkinsTool uses a REQUEST cache to store skin objects, as
-            # with TimerService we have the same REQUEST over multiple
-            # portals, we clear this cache to make sure the cache doesn't
-            # contains skins from another portal.
-            try:
-              self.getPortalObject().portal_skins.changeSkin(None)
-            except AttributeError:
-              pass
-            # call tic for the current processing_node
-            # the processing_node numbers are the indices of the elements in the node tuple +1
-            # because processing_node starts form 1
-            if currentNode in processing_node_list:
-              self.tic(processing_node_list.index(currentNode)+1)
-          except:
-            # Catch ALL exception to avoid killing timerserver.
-            LOG('ActivityTool', ERROR, 'process_timer received an exception', error=sys.exc_info())
+          # get owner of portal_catalog, so normally we should be able to
+          # have the permission to invoke all activities
+          user = self.portal_catalog.getWrappedOwner()
+          newSecurityManager(self.REQUEST, user)
+          currentNode = self.getCurrentNode()
+          self.registerNode(currentNode)
+          processing_node_list = self.getNodeList(role=ROLE_PROCESSING)
+          # only distribute when we are the distributingNode
+          if self.getDistributingNode() == currentNode:
+            self.distribute(len(processing_node_list))
+          # SkinsTool uses a REQUEST cache to store skin objects, as
+          # with TimerService we have the same REQUEST over multiple
+          # portals, we clear this cache to make sure the cache doesn't
+          # contains skins from another portal.
+          try:
+            self.getPortalObject().portal_skins.changeSkin(None)
+          except AttributeError:
+            pass
+          # call tic for the current processing_node
+          # the processing_node numbers are the indices of the elements
+          # in the node tuple +1 because processing_node starts form 1
+          if currentNode in processing_node_list:
+            self.tic(processing_node_list.index(currentNode) + 1)
+        except:
+          # Catch ALL exception to avoid killing timerserver.
+          LOG('ActivityTool', ERROR, 'process_timer received an exception',
+              error=sys.exc_info())
         finally:
           setSecurityManager(old_sm)
       finally:
@@ -984,8 +982,7 @@ class ActivityTool (Folder, UniqueObject):
       while has_awake_activity:
         has_awake_activity = 0
         for activity in activity_list:
-          acquired = is_running_lock.acquire(0)
-          if acquired:
+          if is_running_lock.acquire(0):
             try:
               activity.tic(inner_self, processing_node) # Transaction processing is the responsability of the activity
               has_awake_activity = has_awake_activity or activity.isAwake(inner_self, processing_node)
...
@@ -192,7 +192,7 @@ class TradeModelPath(Path):
       # * remove categories which base name is not category
       # * respect base parameter
       prefix = category + '/'
-      start_index = not base and len(prefix) or 0
+      start_index = 0 if base else len(prefix)
       return [category[start_index:]
                 for category in category_list
                 if category.startswith(prefix)]
...
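The `and/or` expression replaced above is the classic pre-PEP 308 workaround, and it only works because its middle operand can never be falsy here. A small self-contained sketch of the difference (the values are illustrative):

prefix = 'base/'

# Old idiom: "not base and len(prefix) or 0" relies on and/or short-circuiting.
# If the middle operand could evaluate to a falsy value, the expression would
# silently fall through to the last operand instead.
def start_index_old(base):
    return not base and len(prefix) or 0

# PEP 308 form: explicit, and safe regardless of operand truthiness.
def start_index_new(base):
    return 0 if base else len(prefix)

assert start_index_old(True) == start_index_new(True) == 0
assert start_index_old(False) == start_index_new(False) == len(prefix)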
@@ -179,11 +179,9 @@ def fixSkinNames(self, REQUEST=None, file=None, dry_run=0):
     msg += 'This does not modify anything by default. If you really want to fix skin names, specify %s/ERP5Site_fixSkinNames?file=NAME&dry_run=0 \n\n' % self.absolute_url()
     return msg
 
-  file = os.path.join(data_dir, file)
-  file = open(file, 'r')
-  class NamingInformation: pass
-  info_list = []
-  try:
+  with open(os.path.join(data_dir, file)) as file:
+    class NamingInformation: pass
+    info_list = []
     reader = csv.reader(file)
     for row in reader:
       folder, name, new_name, meta_type = row[:4]
@@ -205,8 +203,6 @@ def fixSkinNames(self, REQUEST=None, file=None, dry_run=0):
       info.regexp = re.compile('\\b' + re.escape(name) + '\\b') # This is used to search the name
       info.removed = removed
       info_list.append(info)
-  finally:
-    file.close()
 
   # Now we have information enough. Check the skins.
   msg = ''
@@ -232,11 +228,8 @@ def fixSkinNames(self, REQUEST=None, file=None, dry_run=0):
         msg += '%s\n' % line
         if not dry_run:
           if skin.meta_type in fs_skin_spec:
-            f = open(expandpath(skin.getObjectFSPath()), 'w')
-            try:
+            with open(expandpath(skin.getObjectFSPath()), 'w') as f:
               f.write(text)
-            finally:
-              f.close()
           else:
             REQUEST['BODY'] = text
             skin.manage_FTPput(REQUEST, REQUEST.RESPONSE)
...
@@ -206,8 +206,7 @@ class AlarmTool(BaseTool):
       This method is called by TimerService in the interval given
       in zope.conf. The Default is every 5 seconds.
     """
-    acquired = last_tic_lock.acquire(0)
-    if not acquired:
+    if not last_tic_lock.acquire(0):
       return
     try:
       # make sure our skin is set-up. On CMF 1.5 it's setup by acquisition,
...
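Note that the timer locks here and in ActivityTool keep the non-blocking acquire(0) call instead of being converted to a with statement: `with lock:` always blocks, whereas these callbacks want to give up immediately when a previous run still holds the lock. A rough sketch of the retained pattern, with hypothetical names:

import threading

last_tic_lock = threading.Lock()

def process_timer_sketch(do_tic):
    # acquire(0) is a non-blocking attempt: it returns False at once when the
    # lock is already held, so the timer callback simply skips this run
    # instead of queueing up behind the previous one.
    if not last_tic_lock.acquire(0):
        return
    try:
        do_tic()
    finally:
        last_tic_lock.release()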
@@ -141,8 +141,10 @@ class IntrospectionTool(LogMixin, BaseTool):
     if compressed:
       tmp_file_path = tempfile.mktemp(dir=tmp_file_path)
       tmp_file = tarfile.open(tmp_file_path,"w:gz")
-      tmp_file.add(file_path)
-      tmp_file.close()
+      try:
+        tmp_file.add(file_path)
+      finally:
+        tmp_file.close()
       RESPONSE.setHeader('Content-type', 'application/x-tar')
       RESPONSE.setHeader('Content-Disposition', \
           'attachment;filename="%s.tar.gz"' % file_path.split('/')[-1])
@@ -154,13 +156,10 @@ class IntrospectionTool(LogMixin, BaseTool):
       tmp_file_path = file_path
 
-    f = open(tmp_file_path)
-    try:
+    with open(tmp_file_path) as f:
       RESPONSE.setHeader('Content-Length', os.stat(tmp_file_path).st_size)
       for data in f:
         RESPONSE.write(data)
-    finally:
-      f.close()
 
     if compressed:
       os.remove(tmp_file_path)
@@ -190,30 +189,21 @@ class IntrospectionTool(LogMixin, BaseTool):
     char_per_line = 75
 
-    tailed_file = open(log_file,'r')
+    with open(log_file,'r') as tailed_file:
       while 1:
         try:
           tailed_file.seek(-1 * char_per_line * line_number, 2)
         except IOError:
           tailed_file.seek(0)
-        if tailed_file.tell() == 0:
-          at_start = 1
-        else:
-          at_start = 0
-
-        lines = tailed_file.read().split("\n")
-        if (len(lines) > (line_number + 1)) or at_start:
-          break
-
-        # The lines are bigger than we thought
-        char_per_line = char_per_line * 1.3 # Inc for retry
-
-    tailed_file.close()
-
-    if len(lines) > line_number:
-      start = len(lines) - line_number - 1
-    else:
-      start = 0
+        pos = tailed_file.tell()
+
+        lines = tailed_file.read().split("\n")
+        if len(lines) > (line_number + 1) or not pos:
+          break
+
+        # The lines are bigger than we thought
+        char_per_line *= 1.3 # Inc for retry
+
+    start = max(len(lines) - line_number - 1, 0)
 
     return "\n".join(lines[start:len(lines)])
 
   security.declareProtected(Permissions.ManagePortal, 'tailEventLog')
@@ -330,20 +320,17 @@ class IntrospectionTool(LogMixin, BaseTool):
     """
     def cached_getSystemVersionDict():
       import pkg_resources
-      def tuple_to_format_str(t):
-        return '.'.join([str(i) for i in t])
       version_dict = {}
       for dist in pkg_resources.working_set:
         version_dict[dist.key] = dist.version
       from Products import ERP5 as erp5_product
-      erp5_product_path = erp5_product.__file__.split("/")[:-1]
+      erp5_product_path = os.path.dirname(erp5_product.__file__)
       try:
-        erp5_v = open("/".join((erp5_product_path) + ["VERSION.txt"])).read().strip()
-        erp5_version = erp5_v.replace("ERP5 ", "")
-      except:
+        with open(os.path.join(erp5_product_path, "VERSION.txt")) as f:
+          erp5_version = f.read().strip().replace("ERP5 ", "")
+      except Exception:
         erp5_version = None
       version_dict["ProductS.ERP5"] = erp5_version
       return version_dict
...
@@ -298,13 +298,10 @@ class TemplateTool (BaseTool):
     """
       Import template from a temp file (as uploaded by the user)
     """
-    file = open(path, 'rb')
-    try:
+    with open(path, 'rb') as file:
       # read magic key to determine wich kind of bt we use
       file.seek(0)
       magic = file.read(5)
-    finally:
-      file.close()
     if magic == '<?xml': # old version
       self._importObjectFromFile(path, id=id)
@@ -342,11 +339,8 @@ class TemplateTool (BaseTool):
       prop_dict.pop('id', '')
       bt.edit(**prop_dict)
       # import all other files from bt
-      fobj = open(path, 'rb')
-      try:
+      with open(path, 'rb') as fobj:
         bt.importFile(file=fobj)
-      finally:
-        fobj.close()
     finally:
       tar.close()
     return bt
@@ -398,7 +392,8 @@ class TemplateTool (BaseTool):
         if not os.path.exists(prop_path):
           value = None
         else:
-          value = open(prop_path, 'rb').read()
+          with open(prop_path, 'rb') as f:
+            value = f.read()
         if value is 'None':
           # At export time, we used to export non-existent properties:
           # str(obj.getProperty('non-existing')) == 'None'
@@ -523,11 +518,8 @@ class TemplateTool (BaseTool):
     tempid, temppath = mkstemp()
     try:
       os.close(tempid) # Close the opened fd as soon as possible
-      tempfile = open(temppath, 'wb')
-      try:
+      with open(temppath, 'wb') as tempfile:
         tempfile.write(import_file.read())
-      finally:
-        tempfile.close()
       bt = self._importBT(temppath, id)
     finally:
       os.remove(temppath)
...
@@ -427,13 +427,8 @@ class TestERP5Catalog(ERP5TypeTestCase, LogInterceptor):
     #if uid_buffer_key in uid_buffer_dict:
     #  del uid_buffer_dict[uid_buffer_key]
     def getUIDBuffer(*args, **kw):
-      uid_lock = catalog.__class__._reserved_uid_lock
-      uid_lock.acquire()
-      try:
-        result = catalog.getUIDBuffer(*args, **kw)
-      finally:
-        uid_lock.release()
-      return result
+      with catalog.__class__._reserved_uid_lock:
+        return catalog.getUIDBuffer(*args, **kw)
 
     getUIDBuffer(force_new_buffer=True)
...
@@ -2343,8 +2343,8 @@ class ListBoxHTMLRendererLine(ListBoxRendererLine):
     url_column_dict = dict(renderer.getUrlColumnList())
     selection = renderer.getSelection()
     selection_name = renderer.getSelectionName()
-    ignore_layout = int(request.get('ignore_layout', \
-      not request.get('is_web_mode', False) and 1 or 0))
+    ignore_layout = int(request.get('ignore_layout',
+      0 if request.get('is_web_mode') else 1))
     editable_mode = int(request.get('editable_mode', 0))
     ui_domain = 'erp5_ui'
     # We need a way to pass the current line object (ie. brain) to the
...
@@ -976,33 +976,25 @@ class %s(Constraint):
     # Create an empty __init__.py.
     init = os.path.join(path, '__init__.py')
     if not os.path.exists(init):
-      f = open(init, 'w')
-      f.close()
+      open(init, 'w').close()
 
     # For convenience, make .cvsignore.
     if generate_cvsignore:
       cvsignore = os.path.join(path, '.cvsignore')
       if not os.path.exists(cvsignore):
-        f = open(cvsignore, 'w')
-        try:
+        with open(cvsignore, 'w') as f:
           f.write('*.pyc' + os.linesep)
-        finally:
-          f.close()
 
     # Create a Permissions module for this Product.
     permissions = os.path.join(base_path, 'Permissions.py')
     if not os.path.exists(permissions):
-      f = open(permissions, 'w')
-      f.close()
+      open(permissions, 'w').close()
 
     # Make .cvsignore for convenience.
     if generate_cvsignore:
       cvsignore = os.path.join(base_path, '.cvsignore')
       if not os.path.exists(cvsignore):
-        f = open(cvsignore, 'w')
-        try:
+        with open(cvsignore, 'w') as f:
           f.write('*.pyc' + os.linesep)
-        finally:
-          f.close()
 
     # Create an init file for this Product.
     init = os.path.join(base_path, '__init__.py')
@@ -1055,11 +1047,8 @@ def initialize( context ):
                  content_constructors = (),
                  content_classes = ())
 ''' % COPYRIGHT
-    f = open(init, 'w')
-    try:
+    with open(init, 'w') as f:
       f.write(text)
-    finally:
-      f.close()
 
     # Create a skeleton README.txt.
     readme = os.path.join(base_path, 'README.txt')
@@ -1069,11 +1058,8 @@ def initialize( context ):
 %s was automatically generated by ERP5 Class Tool.
 ''' % (product_id, product_id)
-    f = open(readme, 'w')
-    try:
+    with open(readme, 'w') as f:
       f.write(text)
-    finally:
-      f.close()
 
     # Now, copy selected code.
     for d, m, id_list in (('Document', readLocalDocument, document_id_list),
@@ -1084,11 +1070,8 @@ def initialize( context ):
       for class_id in id_list:
         path = os.path.join(base_path, d, class_id) + '.py'
         text = m(class_id)
-        f = open(path, 'w')
-        try:
+        with open(path, 'w') as f:
           f.write(text)
-        finally:
-          f.close()
 
     if REQUEST is not None:
       REQUEST.RESPONSE.redirect('%s/manage_viewProductGeneration?manage_tabs_message=New+Product+Saved+In+%s' % (self.absolute_url(), base_path))
...
@@ -536,12 +536,8 @@ def readLocalPropertySheet(class_id):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "PropertySheet")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
-    text = f.read()
-  finally:
-    f.close()
-  return text
+  with open(path) as f:
+    return f.read()
 
 def writeLocalPropertySheet(class_id, text, create=1, instance_home=None):
   if instance_home is None:
@@ -554,11 +550,8 @@ def writeLocalPropertySheet(class_id, text, create=1, instance_home=None):
   if create:
     if os.path.exists(path):
       raise IOError, 'the file %s is already present' % path
-  f = open(path, 'w')
-  try:
+  with open(path, 'w') as f:
     f.write(text)
-  finally:
-    f.close()
   # load the file, so that an error is raised if file is invalid
   module = imp.load_source(class_id, path)
   getattr(module, class_id)
@@ -570,8 +563,7 @@ def importLocalPropertySheet(class_id, path = None):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "PropertySheet")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
+  with open(path) as f:
     module = imp.load_source(class_id, path, f)
     klass = None
     try:
@@ -582,8 +574,6 @@ def importLocalPropertySheet(class_id, path = None):
     setattr(PropertySheet, class_id, klass)
     # Register base categories
     registerBaseCategories(klass)
-  finally:
-    f.close()
 
 base_category_dict = {}
 def registerBaseCategories(property_sheet):
@@ -608,11 +598,8 @@ def importLocalInterface(module_id, path = None, is_erp5_type=False):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "interfaces")
   path = os.path.join(path, "%s.py" % module_id)
-  f = open(path)
-  try:
+  with open(path) as f:
     module = imp.load_source(class_id, path, f)
-  finally:
-    f.close()
   from zope.interface import Interface
   from Products.ERP5Type import interfaces
   InterfaceClass = type(Interface)
@@ -627,12 +614,9 @@ def importLocalConstraint(class_id, path = None):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "Constraint")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
+  with open(path) as f:
     module = imp.load_source(class_id, path, f)
     setattr(Products.ERP5Type.Constraint, class_id, getattr(module, class_id))
-  finally:
-    f.close()
 
 def importLocalInteractor(class_id, path=None):
   import Products.ERP5Type.Interactor
@@ -640,13 +624,10 @@ def importLocalInteractor(class_id, path=None):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "Interactor")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
+  with open(path) as f:
     module = imp.load_source(class_id, path, f)
     setattr(Products.ERP5Type.Interactor, class_id, getattr(module, class_id))
     registerInteractorClass(class_id, getattr(Products.ERP5Type.Interactor, class_id))
-  finally:
-    f.close()
 
 def getLocalExtensionList():
   if not getConfiguration:
@@ -699,12 +680,8 @@ def readLocalExtension(class_id):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "Extensions")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
-    text = f.read()
-  finally:
-    f.close()
-  return text
+  with open(path) as f:
+    return f.read()
 
 def removeLocalTest(class_id):
   instance_home = getConfiguration().instancehome
@@ -718,23 +695,15 @@ def readLocalTest(class_id):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "tests")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
-    text = f.read()
-  finally:
-    f.close()
-  return text
+  with open(path) as f:
+    return f.read()
 
 def readLocalConstraint(class_id):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "Constraint")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
-    text = f.read()
-  finally:
-    f.close()
-  return text
+  with open(path) as f:
    return f.read()
 
 def writeLocalExtension(class_id, text, create=1, instance_home=None):
   if instance_home is None:
@@ -747,11 +716,8 @@ def writeLocalExtension(class_id, text, create=1, instance_home=None):
   if create:
     if os.path.exists(path):
       raise IOError, 'the file %s is already present' % path
-  f = open(path, 'w')
-  try:
+  with open(path, 'w') as f:
     f.write(text)
-  finally:
-    f.close()
 
 def writeLocalTest(class_id, text, create=1, instance_home=None):
   if instance_home is None:
@@ -764,11 +730,8 @@ def writeLocalTest(class_id, text, create=1, instance_home=None):
   if create:
     if os.path.exists(path):
       raise IOError, 'the file %s is already present' % path
-  f = open(path, 'w')
-  try:
+  with open(path, 'w') as f:
     f.write(text)
-  finally:
-    f.close()
 
 def writeLocalConstraint(class_id, text, create=1, instance_home=None):
   if instance_home is None:
@@ -781,11 +744,8 @@ def writeLocalConstraint(class_id, text, create=1, instance_home=None):
   if create:
     if os.path.exists(path):
       raise IOError, 'the file %s is already present' % path
-  f = open(path, 'w')
-  try:
+  with open(path, 'w') as f:
     f.write(text)
-  finally:
-    f.close()
   # load the file, so that an error is raised if file is invalid
   module = imp.load_source(class_id, path)
   getattr(module, class_id)
@@ -829,12 +789,8 @@ def readLocalDocument(class_id):
   instance_home = getConfiguration().instancehome
   path = os.path.join(instance_home, "Document")
   path = os.path.join(path, "%s.py" % class_id)
-  f = open(path)
-  try:
-    text = f.read()
-  finally:
-    f.close()
-  return text
+  with open(path) as f:
+    return f.read()
 
 def writeLocalDocument(class_id, text, create=1, instance_home=None):
   if instance_home is None:
@@ -849,11 +805,8 @@ def writeLocalDocument(class_id, text, create=1, instance_home=None):
     raise IOError, 'the file %s is already present' % path
   # check there is no syntax error (that's the most we can do at this time)
   compile(text, path, 'exec')
-  f = open(path, 'w')
-  try:
+  with open(path, 'w') as f:
     f.write(text)
-  finally:
-    f.close()
 
 def setDefaultClassProperties(property_holder):
   """Initialize default properties for ERP5Type Documents.
...
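Several of the helpers above now return from inside the with block. This is safe: the context manager's exit handler still runs on the way out, so the file is closed exactly as it was by the removed try/finally. A quick self-contained check (the path and content are illustrative):

import os
import tempfile

def read_text(path):
    # Returning from inside the with block still triggers the file's __exit__,
    # so this is equivalent to the old open / try / read / finally / close dance.
    with open(path) as f:
        return f.read()

fd, path = tempfile.mkstemp()
os.close(fd)
with open(path, 'w') as f:
    f.write('hello')
assert read_text(path) == 'hello'
os.remove(path)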
@@ -287,47 +287,46 @@ class PortalTypeMetaClass(GhostBaseMetaClass, PropertyHolder):
     ERP5Base.aq_method_lock.acquire()
     try:
       try:
-        try:
-          class_definition = generatePortalTypeClass(site, portal_type)
-        except AttributeError:
-          LOG("ERP5Type.Dynamic", WARNING,
-              "Could not access Portal Type Object for type %r"
-              % portal_type, error=sys.exc_info())
-          base_tuple = (ERP5BaseBroken, )
-          portal_type_category_list = []
-          attribute_dict = dict(_categories=[], constraints=[])
-          interface_list = []
-        else:
-          base_tuple, portal_type_category_list, \
-            interface_list, attribute_dict = class_definition
-
-        klass.__isghost__ = False
-        klass.__bases__ = base_tuple
-
-        klass.resetAcquisition()
-
-        for key, value in attribute_dict.iteritems():
-          setattr(klass, key, value)
-
-        if getattr(klass.__setstate__, 'im_func', None) is \
-            persistent_migration.__setstate__:
-          # optimization to reduce overhead of compatibility code
-          klass.__setstate__ = persistent_migration.Base__setstate__
-
-        for interface in interface_list:
-          classImplements(klass, interface)
-
-        # skip this during the early Base Type / Types Tool generation
-        # because they dont have accessors, and will mess up
-        # workflow methods. We KNOW that we will re-load this type anyway
-        if len(base_tuple) > 1:
-          klass.generatePortalTypeAccessors(site, portal_type_category_list)
-          # need to set %s__roles__ for generated methods
-          cls.setupSecurity()
-      except Exception:
-        import traceback; traceback.print_exc()
-        raise
+        class_definition = generatePortalTypeClass(site, portal_type)
+      except AttributeError:
+        LOG("ERP5Type.Dynamic", WARNING,
+            "Could not access Portal Type Object for type %r"
+            % portal_type, error=sys.exc_info())
+        base_tuple = (ERP5BaseBroken, )
+        portal_type_category_list = []
+        attribute_dict = dict(_categories=[], constraints=[])
+        interface_list = []
+      else:
+        base_tuple, portal_type_category_list, \
+          interface_list, attribute_dict = class_definition
+
+      klass.__isghost__ = False
+      klass.__bases__ = base_tuple
+
+      klass.resetAcquisition()
+
+      for key, value in attribute_dict.iteritems():
+        setattr(klass, key, value)
+
+      if getattr(klass.__setstate__, 'im_func', None) is \
+          persistent_migration.__setstate__:
+        # optimization to reduce overhead of compatibility code
+        klass.__setstate__ = persistent_migration.Base__setstate__
+
+      for interface in interface_list:
+        classImplements(klass, interface)
+
+      # skip this during the early Base Type / Types Tool generation
+      # because they dont have accessors, and will mess up
+      # workflow methods. We KNOW that we will re-load this type anyway
+      if len(base_tuple) > 1:
+        klass.generatePortalTypeAccessors(site, portal_type_category_list)
+        # need to set %s__roles__ for generated methods
+        cls.setupSecurity()
+    except Exception:
+      import traceback; traceback.print_exc()
+      raise
     finally:
       ERP5Base.aq_method_lock.release()
...
@@ -27,8 +27,7 @@ if '__getstate__' not in Uninstalled.BrokenClass.__dict__:
   cache = Uninstalled.broken_klasses
 
   def Broken(self, oid, pair):
-    lock.acquire()
-    try:
+    with lock:
       cached = pair in cache
       result = Uninstalled_Broken(self, oid, pair)
       if not cached:
@@ -36,8 +35,6 @@ if '__getstate__' not in Uninstalled.BrokenClass.__dict__:
         assert not issubclass(klass, PersistentBroken), \
           "This monkey-patch is not useful anymore"
         cache[pair] = persistentBroken(klass)
-    finally:
-      lock.release()
     return result
 
   Uninstalled.Broken = Broken
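Threading locks are context managers, so the `with lock:` used in this and the following hunks acquires on entry and releases on exit even when the body raises, giving the same guarantee as the acquire()/try/finally/release() sequence it replaces. A minimal sketch with illustrative names:

import threading

lock = threading.Lock()
cache = {}

def get_or_compute(key, compute):
    # "with lock:" releases the lock when the block exits, whether normally
    # or through an exception raised by compute().
    with lock:
        if key not in cache:
            cache[key] = compute(key)
        return cache[key]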
@@ -15,14 +15,11 @@ class DummyTaskDistributionTool(object):
     return None, revision
 
   def startUnitTest(self, test_result_path, exclude_list=()):
-    self.lock.acquire()
-    try:
+    with self.lock:
       for i, test in enumerate(self.test_name_list):
         if test not in exclude_list:
           del self.test_name_list[i]
           return None, test
-    finally:
-      self.lock.release()
 
   def stopUnitTest(self, test_path, status_dict):
     pass
\ No newline at end of file
@@ -143,12 +143,8 @@ class Browser:
   def _createFile(self, filename, content):
     file_path = os.path.join(self.profile_dir, filename)
-    f = open(file_path, 'w')
-    try:
+    with open(file_path, 'w') as f:
       f.write(content)
-    finally:
-      f.close()
     return file_path
 
   def _setDisplay(self, display):
...
@@ -289,11 +289,9 @@ class TestERP5Type(PropertySheetTestCase, LogInterceptor):
       raise WriteError
     portal.person_module.__class__._setLastId = _setLastId
     try:
-      try:
-        o = portal.person_module.newContent(portal_type="Person",
-                                            temp_object=1)
-      except WriteError:
-        self.fail("Container last ID modified")
+      o = portal.person_module.newContent(portal_type="Person", temp_object=1)
+    except WriteError:
+      self.fail("Container last ID modified")
     finally:
       del portal.person_module.__class__._setLastId
...
@@ -323,7 +323,7 @@ class Git(WorkingCopy):
     if push:
       # if we can't push because we are not up-to-date, we'll either 'merge' or
       # 'rebase' depending on we already have local commits or not
-      merge = self.getAheadCount() and 'merge' or 'rebase'
+      merge = 'merge' if self.getAheadCount() else 'rebase'
       selected_set = set(added)
       selected_set.update(modified)
...
@@ -48,16 +48,13 @@ _chdir_lock = threading.RLock()
 @simple_decorator
 def chdir_working_copy(func):
   def decorator(self, *args, **kw):
-    _chdir_lock.acquire()
-    try:
+    with _chdir_lock:
       cwd = os.getcwd()
       try:
         os.chdir(self.working_copy)
         return func(self, *args, **kw)
       finally:
         os.chdir(cwd)
-    finally:
-      _chdir_lock.release()
   return decorator
 
 class Subversion(WorkingCopy):
...
@@ -209,11 +209,8 @@ class WorkingCopy(Implicit):
       revision = int(self.showOld(path)) + 1
     except NotVersionedError:
       return 1
-    file = open(os.path.join(self.working_copy, path), 'w')
-    try:
+    with open(os.path.join(self.working_copy, path), 'w') as file:
       file.write(str(revision))
-    finally:
-      file.close()
     return revision
 
   def hasDiff(self, path):
@@ -279,11 +276,8 @@ class WorkingCopy(Implicit):
     head = '<span style="font-weight: bold; color: black;">%s</span>' \
          % real_path
     try:
-      f = open(os.path.join(self.working_copy, path), 'rU')
-      try:
+      with open(os.path.join(self.working_copy, path), 'rU') as f:
         text = f.read()
-      finally:
-        f.close()
     except IOError, e:
       if e.errno == errno.EISDIR:
         return '%s<hr/>%r is a folder!' % (head, path)
...
@@ -214,43 +214,41 @@ class EGOVUserManager(ERP5UserManager):
       sm = getSecurityManager()
       if sm.getUser().getId() != SUPER_USER:
         newSecurityManager(self, self.getUser(SUPER_USER))
       try:
-        try:
-          result = portal.portal_catalog.unrestrictedSearchResults(
+        result = portal.portal_catalog.unrestrictedSearchResults(
             select_expression='reference',
             portal_type=self.portal_type_list, reference=login)
         if len(result) != 1: # we won't proceed with groups
           if len(result) > 1: # configuration is screwed
-            raise ConsistencyError, 'There is more than one Person whose \
-                login is %s : %s' % (user_name,
+            raise ConsistencyError('There is more than one Person whose'
+                ' login is %s : %s' % (user_name,
                   repr([r.getObject() for r in catalog_result]))
           else: # no person is linked to this user login
             # this permit to get the module of the application
             # the goal is to work with anonymous applications, even if
            # they are not reindexed
             module_id = self.REQUEST.get('anonymous_module', None)
             if module_id:
               module = getattr(portal, module_id, None)
               if module is not None:
                 result = module._getOb(login[0], None)
                 if result is not None:
                   return [result.getPath(),]
                 else:
                   return []
             else:
               return []
       except ConflictError:
         raise
       except:
         LOG('ERP5Security', PROBLEM, 'getUserByLogin failed', error=sys.exc_info())
         # Here we must raise an exception to prevent callers from caching
         # a result of a degraded situation.
         # The kind of exception does not matter as long as it's catched by
         # PAS and causes a lookup using another plugin or user folder.
         # As PAS does not define explicitely such exception, we must use
         # the _SWALLOWABLE_PLUGIN_EXCEPTIONS list.
         raise _SWALLOWABLE_PLUGIN_EXCEPTIONS[0]
       finally:
         setSecurityManager(sm)
     # XXX: Here, we filter catalog result list ALTHOUGH we did pass
...
@@ -697,8 +697,7 @@ class Catalog(Folder,
     """
       Import properties from an XML file.
     """
-    f = open(file)
-    try:
+    with open(file) as f:
       doc = parse(f)
       root = doc.documentElement
       try:
@@ -747,8 +746,6 @@ class Catalog(Folder,
             self.filter_dict[id]['expression_instance'] = None
       finally:
         doc.unlink()
-    finally:
-      f.close()
 
   def manage_historyCompare(self, rev1, rev2, REQUEST,
                             historyComparisonResults=''):