From a542a9ca4b9ee8a5041851bc936af3fd9e025f9d Mon Sep 17 00:00:00 2001
From: Yoshinori Okuji <yo@nexedi.com>
Date: Fri, 2 Dec 2005 09:29:10 +0000
Subject: [PATCH] Split ZopePatch into small parts.

git-svn-id: https://svn.erp5.org/repos/public/erp5/trunk@4461 20353a03-c40f-0410-a6d1-a30d3c3de9de
---
 product/ERP5Type/ZopePatch.py               | 2193 +------------------
 product/ERP5Type/patches/BTreeFolder2.py    |   98 +
 product/ERP5Type/patches/BaseRequest.py     |   72 +
 product/ERP5Type/patches/CMFCatalogAware.py |   38 +
 product/ERP5Type/patches/DA.py              |  125 ++
 product/ERP5Type/patches/DCWorkflow.py      |  300 +++
 product/ERP5Type/patches/Expression.py      |   22 +
 product/ERP5Type/patches/MembershipTool.py  |   17 +
 product/ERP5Type/patches/ObjectManager.py   |   40 +
 product/ERP5Type/patches/ProductContext.py  |   31 +
 product/ERP5Type/patches/PropertiedUser.py  |  188 ++
 product/ERP5Type/patches/PropertyManager.py |  175 ++
 product/ERP5Type/patches/States.py          |   42 +
 product/ERP5Type/patches/Transaction.py     |  179 ++
 product/ERP5Type/patches/WorkflowTool.py    |   91 +
 product/ERP5Type/patches/XMLExportImport.py |  170 ++
 product/ERP5Type/patches/__init__.py        |    0
 product/ERP5Type/patches/ppml.py            |  674 ++++++
 product/ERP5Type/patches/sqlvar.py          |  114 +
 19 files changed, 2394 insertions(+), 2175 deletions(-)
 create mode 100755 product/ERP5Type/patches/BTreeFolder2.py
 create mode 100755 product/ERP5Type/patches/BaseRequest.py
 create mode 100755 product/ERP5Type/patches/CMFCatalogAware.py
 create mode 100755 product/ERP5Type/patches/DA.py
 create mode 100755 product/ERP5Type/patches/DCWorkflow.py
 create mode 100755 product/ERP5Type/patches/Expression.py
 create mode 100755 product/ERP5Type/patches/MembershipTool.py
 create mode 100755 product/ERP5Type/patches/ObjectManager.py
 create mode 100755 product/ERP5Type/patches/ProductContext.py
 create mode 100755 product/ERP5Type/patches/PropertiedUser.py
 create mode 100755 product/ERP5Type/patches/PropertyManager.py
 create mode 100755 product/ERP5Type/patches/States.py
 create mode 100755 product/ERP5Type/patches/Transaction.py
 create mode 100755 product/ERP5Type/patches/WorkflowTool.py
 create mode 100755 product/ERP5Type/patches/XMLExportImport.py
 create mode 100755 product/ERP5Type/patches/__init__.py
 create mode 100755 product/ERP5Type/patches/ppml.py
 create mode 100755 product/ERP5Type/patches/sqlvar.py

diff --git a/product/ERP5Type/ZopePatch.py b/product/ERP5Type/ZopePatch.py
index 8fa3334dff..a6ce3c5613 100755
--- a/product/ERP5Type/ZopePatch.py
+++ b/product/ERP5Type/ZopePatch.py
@@ -17,2180 +17,23 @@
 # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
 # FOR A PARTICULAR PURPOSE
-#
-#
-# Based on: PropertyManager in OFS
-#
-##############################################################################
-
-
-from zLOG import LOG, INFO, ERROR, WARNING
-from string import join
-from DateTime import DateTime
-
-##############################################################################
-# Folder naming: member folder should be names as a singular in small caps
-from Products.CMFDefault.MembershipTool import MembershipTool
-MembershipTool.membersfolder_id = 'member'
-
-##############################################################################
-# Import: add rename feature
-from OFS.ObjectManager import ObjectManager, customImporters
-
-def ObjectManager_importObjectFromFile(self, filepath, verify=1, set_owner=1, id=None):
-    #LOG('_importObjectFromFile, filepath',0,filepath)
-    # locate a valid connection
-    connection=self._p_jar
-    obj=self
-
-    while connection is None:
-        obj=obj.aq_parent
-        connection=obj._p_jar
-    ob=connection.importFile(
-        filepath, customImporters=customImporters)
-    if verify: self._verifyObjectPaste(ob, validate_src=0)
-    if id is None:
-      id=ob.id
-    if hasattr(id, 'im_func'): id=id()
-    self._setObject(id, ob, set_owner=set_owner)
-
-    # try to make ownership implicit if possible in the context
-    # that the object was imported into.
-    ob=self._getOb(id)
-    ob.manage_changeOwnershipType(explicit=0)
-
-ObjectManager._importObjectFromFile=ObjectManager_importObjectFromFile
-
-##############################################################################
-# Properties
-from OFS.PropertyManager import PropertyManager, type_converters
-from OFS.PropertyManager import escape
-from Globals import DTMLFile
-from Products.ERP5Type.Utils import createExpressionContext
-from Products.ERP5Type.ERP5Type import ERP5TypeInformation
-from Products.CMFCore.Expression import Expression
-
-class ERP5PropertyManager(PropertyManager):
-  """
-    This class is only for backward compatibility.
-  """
-  pass
-
-PropertyManager_manage_propertiesForm=DTMLFile('dtml/properties', globals(),
-                                               property_extensible_schema__=1)
-
-
-def PropertyManager_updateProperty(self, id, value):
-    # Update the value of an existing property. If value
-    # is a string, an attempt will be made to convert
-    # the value to the type of the existing property.
-    self._wrapperCheck(value)
-    if not hasattr(self, 'isRADContent'):
-      if not self.hasProperty(id):
-          raise 'Bad Request', 'The property %s does not exist' % escape(id)
-    if type(value)==type(''):
-        proptype=self.getPropertyType(id) or 'string'
-        if type_converters.has_key(proptype):
-            value=type_converters[proptype](value)
-    #LOG('_updateProperty', 0, 'self = %r, id = %r, value = %r' % (self, id, value))
-    self._setPropValue(id, value)
-
-def PropertyManager_hasProperty(self, id):
-    """Return true if object has a property 'id'"""
-    for p in self.propertyIds():
-        if id==p:
-            return 1
-    return 0
-
-def PropertyManager_getProperty(self, id, d=None, evaluate=1):
-    """Get the property 'id', returning the optional second
-        argument or None if no such property is found."""
-    type = self.getPropertyType(id)
-    if evaluate and type == 'tales':
-        value = getattr(self, id)
-        expression = Expression(value)
-        econtext = createExpressionContext(self)
-        return expression(econtext)
-    elif type:
-      return getattr(self, id)
-    return d
-
-def PropertyManager_getPropertyType(self, id):
-    """Get the type of property 'id', returning None if no
-      such property exists"""
-    for md in self._propertyMap():
-        if md['id']==id:
-            return md.get('type', 'string')
-    return None
-
-def PropertyManager_setProperty(self, id, value, type=None):
-    # for selection and multiple selection properties
-    # the value argument indicates the select variable
-    # of the property
-
-    if type is None:
-      # Generate a default type
-      value_type = type(value)
-      if value_type in (type([]), type(())):
-        type = 'lines'
-      elif value_type is type(1):
-        type = 'int'
-      elif value_type is type(1L):
-        type = 'long'
-      elif value_type is type(1.0):
-        type = 'float'
-      elif value_type is type('a'):
-        if len(value_type.split('\n')) > 1:
-          type = 'text'
-        else:
-          type = 'string'
-      else:
-        type = 'string'
-
-    self._wrapperCheck(value)
-    if not self.valid_property_id(id):
-        raise 'Bad Request', 'Invalid or duplicate property id'
-
-    if type in ('selection', 'multiple selection'):
-        if not hasattr(self, value):
-            raise 'Bad Request', 'No select variable %s' % value
-        self._local_properties=getattr(self, '_local_properties', ()) + (
-            {'id':id, 'type':type, 'select_variable':value},)
-        if type=='selection':
-            self._setPropValue(id, '')
-        else:
-            self._setPropValue(id, [])
-    else:
-        self._local_properties=getattr(self, '_local_properties', ())+({'id':id,'type':type},)
-        self._setPropValue(id, value)
-
-def PropertyManager_delProperty(self, id):
-    if not self.hasProperty(id):
-        raise ValueError, 'The property %s does not exist' % escape(id)
-    self._delPropValue(id)
-    self._local_properties=tuple(filter(lambda i, n=id: i['id'] != n,
-                                  getattr(self, '_local_properties', ())))
-
-def PropertyManager_propertyIds(self):
-    """Return a list of property ids """
-    return map(lambda i: i['id'], self._propertyMap())
-
-def PropertyManager_propertyValues(self):
-    """Return a list of actual property objects """
-    return map(lambda i,s=self: getattr(s,i['id']), self._propertyMap())
-
-def PropertyManager_propertyItems(self):
-    """Return a list of (id,property) tuples """
-    return map(lambda i,s=self: (i['id'],getattr(s,i['id'])), self._propertyMap())
-
-def PropertyManager_propertyMap(self):
-    """Return a tuple of mappings, giving meta-data for properties """
-    return tuple(list(self._properties) + list(getattr(self, '_local_properties', ())))
-
-def PropertyManager_propdict(self):
-    dict={}
-    for p in self._propertyMap():
-        dict[p['id']]=p
-    return dict
-
-def PropertyManager_manage_addProperty(self, id, value, type, REQUEST=None):
-    """Add a new property via the web. Sets a new property with
-    the given id, type, and value."""
-    if type_converters.has_key(type):
-        value=type_converters[type](value)
-    #LOG('manage_addProperty', 0, 'id = %r, value = %r, type = %r, REQUEST = %r' % (id, value, type, REQUEST))
-    self._setProperty(id.strip(), value, type)
-    if REQUEST is not None:
-        return self.manage_propertiesForm(self, REQUEST)
-
-PropertyManager.manage_addProperty = PropertyManager_manage_addProperty
-PropertyManager.manage_propertiesForm = PropertyManager_manage_propertiesForm
-PropertyManager._updateProperty = PropertyManager_updateProperty
-PropertyManager.getPropertyType = PropertyManager_getPropertyType
-PropertyManager._setProperty = PropertyManager_setProperty
-PropertyManager._delProperty = PropertyManager_delProperty
-PropertyManager.propertyIds = PropertyManager_propertyIds
-PropertyManager.propertyValues = PropertyManager_propertyValues
-PropertyManager.propertyItems = PropertyManager_propertyItems
-PropertyManager._propertyMap = PropertyManager_propertyMap
-PropertyManager.propdict = PropertyManager_propdict
-PropertyManager.hasProperty = PropertyManager_hasProperty
-PropertyManager.getProperty = PropertyManager_getProperty
-ERP5TypeInformation.manage_propertiesForm = PropertyManager_manage_propertiesForm
-
-from ZPublisher.Converters import type_converters, field2string
-
-type_converters['tales'] = field2string
-
-##############################################################################
-# XML content of zsql methods
-import re
-try: from IOBTree import Bucket
-except: Bucket=lambda:{}
-from Shared.DC.ZRDB.Aqueduct import decodestring, parse
-from Shared.DC.ZRDB.DA import DA
-
-def DA_fromFile(self, filename):
-  """
-    Read the file and update self
-  """
-  f = file(filename)
-  s = f.read()
-  f.close()
-  self.fromText(s)
-
-def DA_fromText(self, text):
-  """
-    Read the string 'text' and updates self
-  """
-  start = text.find('<dtml-comment>')
-  end = text.find('</dtml-comment>')
-  block = text[start+14:end]
-  parameters = {}
-  for line in block.split('\n'):
-    pair = line.split(':',1)
-    if len(pair)!=2:
-      continue
-    parameters[pair[0].strip().lower()]=pair[1].strip()
-  # check for required and optional parameters
-  max_rows = parameters.get('max_rows',1000)
-  max_cache = parameters.get('max_cache',100)
-  cache_time = parameters.get('cache_time',0)
-  class_name = parameters.get('class_name','')
-  class_file = parameters.get('class_file','')
-  title = parameters.get('title','')
-  connection_id = parameters.get('connection_id','')
-  arguments = parameters.get('arguments','')
-  start = text.rfind('<params>')
-  end = text.rfind('</params>')
-  arguments = text[start+8:end]
-  template = text[end+9:]
-  while template.find('\n')==0:
-    template=template.replace('\n','',1)
-  self.manage_edit(title=title, connection_id=connection_id,
-                  arguments=arguments, template=template)
-  self.manage_advanced(max_rows, max_cache, cache_time, class_name, class_file)
-
-def DA_manage_FTPget(self):
-    """Get source for FTP download"""
-    self.REQUEST.RESPONSE.setHeader('Content-Type', 'text/plain')
-    return """<dtml-comment>
-title:%s
-connection_id:%s
-max_rows:%s
-max_cache:%s
-cache_time:%s
-class_name:%s
-class_file:%s
-</dtml-comment>
-<params>%s</params>
-%s""" % (self.title, self.connection_id,
-         self.max_rows_, self.max_cache_, self.cache_time_,
-         self.class_name_, self.class_file_,
-         self.arguments_src, self.src)
-
-# This function doesn't take care about properties by default
-def DA_PUT(self, REQUEST, RESPONSE):
-    """Handle put requests"""
-    if RESPONSE is not None: self.dav__init(REQUEST, RESPONSE)
-    if RESPONSE is not None: self.dav__simpleifhandler(REQUEST, RESPONSE, refresh=1)
-    body = REQUEST.get('BODY', '')
-    m = re.match('\s*<dtml-comment>(.*?)</dtml-comment>\s*\n', body, re.I | re.S)
-    if m:
-        property_src = m.group(1)
-        parameters = {}
-        for line in property_src.split('\n'):
-          pair = line.split(':',1)
-          if len(pair)!=2:
-            continue
-          parameters[pair[0].strip().lower()]=pair[1].strip()
-        # check for required and optional parameters
-        max_rows = parameters.get('max_rows',1000)
-        max_cache = parameters.get('max_cache',100)
-        cache_time = parameters.get('cache_time',0)
-        class_name = parameters.get('class_name','')
-        class_file = parameters.get('class_file','')
-        title = parameters.get('title','')
-        connection_id = parameters.get('connection_id','')
-        self.manage_advanced(max_rows, max_cache, cache_time, class_name, class_file)
-        self.title = str(title)
-        self.connection_id = str(connection_id)
-        body = body[m.end():]
-    m = re.match('\s*<params>(.*)</params>\s*\n', body, re.I | re.S)
-    if m:
-        self.arguments_src = m.group(1)
-        self._arg=parse(self.arguments_src)
-        body = body[m.end():]
-    template = body
-    self.src = template
-    self.template=t=self.template_class(template)
-    t.cook()
-    self._v_cache={}, Bucket()
-    if RESPONSE is not None: RESPONSE.setStatus(204)
-    return RESPONSE
-
-
-DA.fromFile = DA_fromFile
-DA.fromText = DA_fromText
-DA.manage_FTPget = DA_manage_FTPget
-DA.PUT = DA_PUT
-
-##############################################################################
-# Optimized rendering of global actions (cache)
-
-from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition
-from AccessControl import getSecurityManager, ClassSecurityInfo
-from Products.CMFCore.utils import getToolByName
-from DocumentTemplate.DT_Util import TemplateDict
-from Products.CMFCore.utils import  _getAuthenticatedUser
-from time import time
-from Products.ERP5Type.Cache import CachingMethod
-
-def DCWorkflowDefinition_listGlobalActions(self, info):
-    '''
-    Allows this workflow to
-    include actions to be displayed in the actions box.
-    Called on every request.
-    Returns the actions to be displayed to the user.
-    '''
-    def _listGlobalActions(user=None, id=None, portal_path=None):
-      if not self.worklists:
-          return None  # Optimization
-      sm = getSecurityManager()
-      portal = self._getPortalRoot()
-      res = []
-      fmt_data = None
-      # We want to display some actions depending on the current date
-      # So, we can now put this kind of expression : <= "%(now)s"
-      # May be this patch should be moved to listFilteredActions in the future
-      info.now = DateTime()
-      for id, qdef in self.worklists.items():
-          if qdef.actbox_name:
-              guard = qdef.guard
-              # Patch for ERP5 by JP Smets in order
-              # to implement worklists and search of local roles
-              searchres_len = 0
-              var_match_keys = qdef.getVarMatchKeys()
-              if var_match_keys:
-                  # Check the catalog for items in the worklist.
-                  catalog = getToolByName(self, 'portal_catalog')
-                  dict = {}
-                  for k in var_match_keys:
-                      v = qdef.getVarMatch(k)
-                      v_fmt = map(lambda x, info=info: x%info, v)
-                      dict[k] = v_fmt
-                  # Patch for ERP5 by JP Smets in order
-                  # to implement worklists and search of local roles
-                  if not (guard is None or guard.check(sm, self, portal)):
-                      dict['local_roles'] = guard.roles
-                  # Patch to use ZSQLCatalog and get high speed
-                  # LOG("PatchedDCWorkflowDefinition", 0, dict)
-                  searchres_len = int(apply(catalog.countResults, (), dict)[0][0])
-                  if searchres_len == 0:
-                      continue
-              if fmt_data is None:
-                  fmt_data = TemplateDict()
-                  fmt_data._push(info)
-              fmt_data._push({'count': searchres_len})
-              # Patch for ERP5 by JP Smets in order
-              # to implement worklists and search of local roles
-              if dict.has_key('local_roles'):
-                fmt_data._push({'local_roles': join(guard.roles,';')})
-              else:
-                fmt_data._push({'local_roles': ''})
-              res.append((id, {'name': qdef.actbox_name % fmt_data,
-                              'url': qdef.actbox_url % fmt_data,
-                              'worklist_id': id,
-                              'workflow_title': self.title,
-                              'workflow_id': self.id,
-                              'permissions': (),  # Predetermined.
-                              'category': qdef.actbox_category}))
-              fmt_data._pop()
-      res.sort()
-      return map((lambda (id, val): val), res)
-
-    # Return Cache
-    _listGlobalActions = CachingMethod(_listGlobalActions, id='listGlobalActions', cache_duration = 300)
-    user = str(_getAuthenticatedUser(self))
-    return _listGlobalActions(user=user, id=self.id, portal_path=self._getPortalRoot().getPhysicalPath())
-
-
-DCWorkflowDefinition.listGlobalActions = DCWorkflowDefinition_listGlobalActions
-
-##############################################################################
-# Stribger repair of BTreeFolder2
-import sys
-from Products.BTreeFolder2.BTreeFolder2 import BTreeFolder2Base
-from Acquisition import aq_base
-from BTrees.OOBTree import OOBTree
-from BTrees.OIBTree import OIBTree, union
-from BTrees.Length import Length
-from OFS.ObjectManager import BadRequestException, BeforeDeleteException
-from Products.ZCatalog.Lazy import LazyMap
-
-class ERP5BTreeFolder2Base (BTreeFolder2Base):
-
-    def _cleanup(self):
-        """Cleans up errors in the BTrees.
-
-        Certain ZODB bugs have caused BTrees to become slightly insane.
-        Fortunately, there is a way to clean up damaged BTrees that
-        always seems to work: make a new BTree containing the items()
-        of the old one.
-
-        Returns 1 if no damage was detected, or 0 if damage was
-        detected and fixed.
-        """
-        from BTrees.check import check
-        path = '/'.join(self.getPhysicalPath())
-        try:
-            check(self._tree)
-            for key in self._tree.keys():
-                if not self._tree.has_key(key):
-                    raise AssertionError(
-                        "Missing value for key: %s" % repr(key))
-            check(self._mt_index)
-            for key, object in self._tree.items():
-                meta_type = getattr(object, 'meta_type', None)
-                if meta_type is not None:
-                  if not self._mt_index.has_key(meta_type):
-                      raise AssertionError(
-                          "Missing meta_type index for key: %s" % repr(key))
-            for key, value in self._mt_index.items():
-                if (not self._mt_index.has_key(key)
-                    or self._mt_index[key] is not value):
-                    raise AssertionError(
-                        "Missing or incorrect meta_type index: %s"
-                        % repr(key))
-                check(value)
-                for k in value.keys():
-                    if not value.has_key(k) or not self._tree.has_key(k):
-                        raise AssertionError(
-                            "Missing values for meta_type index: %s"
-                            % repr(key))
-            return 1
-        except (AssertionError, KeyError):
-            LOG('BTreeFolder2', WARNING,
-                'Detected damage to %s. Fixing now.' % path,
-                error=sys.exc_info())
-            try:
-                self._tree = OOBTree(self._tree)
-                mt_index = OOBTree()
-                for id, object in self._tree.items():
-                  # Update the meta type index.
-                  meta_type = getattr(object, 'meta_type', None)
-                  if meta_type is not None:
-                      ids = mt_index.get(meta_type, None)
-                      if ids is None:
-                          ids = OIBTree()
-                          mt_index[meta_type] = ids
-                      ids[id] = 1
-                #LOG('Added All Object in BTree mti',0, map(lambda x:str(x), mt_index.keys()))
-                self._mt_index = OOBTree(mt_index)
-            except:
-                LOG('BTreeFolder2', ERROR, 'Failed to fix %s.' % path,
-                    error=sys.exc_info())
-                raise
-            else:
-                LOG('BTreeFolder2', INFO, 'Fixed %s.' % path)
-            return 0
-
-BTreeFolder2Base._cleanup = ERP5BTreeFolder2Base._cleanup
-
-##############################################################################
-# Stribger repair of BTreeFolder2
-
-from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition, StateChangeInfo, ObjectMoved, createExprContext, aq_parent, aq_inner
-from Products.DCWorkflow import DCWorkflow
-from Products.DCWorkflow.Transitions import TRIGGER_WORKFLOW_METHOD
-from Products.CMFCore.WorkflowCore import WorkflowException
-from Products.ERP5Type.Utils import convertToMixedCase
-
-class ValidationFailed(Exception):
-    """Transition can not be executed because data is not in consistent state"""
-
-DCWorkflow.ValidationFailed = ValidationFailed
-
-from AccessControl import ModuleSecurityInfo
-ModuleSecurityInfo('Products.DCWorkflow.DCWorkflow').declarePublic('ValidationFailed')
-
-
-def DCWorkflowDefinition_executeTransition(self, ob, tdef=None, kwargs=None):
-    '''
-    Private method.
-    Puts object in a new state.
-    '''
-    sci = None
-    econtext = None
-    moved_exc = None
-
-    # Figure out the old and new states.
-    old_sdef = self._getWorkflowStateOf(ob)
-    old_state = old_sdef.getId()
-    if tdef is None:
-        new_state = self.initial_state
-        former_status = {}
-    else:
-        new_state = tdef.new_state_id
-        if not new_state:
-            # Stay in same state.
-            new_state = old_state
-        former_status = self._getStatusOf(ob)
-    new_sdef = self.states.get(new_state, None)
-    if new_sdef is None:
-        raise WorkflowException, (
-            'Destination state undefined: ' + new_state)
-
-    # Execute the "before" script.
-    before_script_success = 1
-    if tdef is not None and tdef.script_name:
-        script = self.scripts[tdef.script_name]
-        # Pass lots of info to the script in a single parameter.
-        sci = StateChangeInfo(
-            ob, self, former_status, tdef, old_sdef, new_sdef, kwargs)
-        try:
-            #LOG('_executeTransition', 0, "script = %s, sci = %s" % (repr(script), repr(sci)))
-            script(sci)  # May throw an exception.
-        except ValidationFailed, validation_exc:
-            before_script_success = 0
-            before_script_error_message = str(validation_exc)
-        except ObjectMoved, moved_exc:
-            ob = moved_exc.getNewObject()
-            # Re-raise after transition
-
-    # Update variables.
-    state_values = new_sdef.var_values
-    if state_values is None: state_values = {}
-    tdef_exprs = None
-    if tdef is not None: tdef_exprs = tdef.var_exprs
-    if tdef_exprs is None: tdef_exprs = {}
-    status = {}
-    for id, vdef in self.variables.items():
-        if not vdef.for_status:
-            continue
-        expr = None
-        if state_values.has_key(id):
-            value = state_values[id]
-        elif tdef_exprs.has_key(id):
-            expr = tdef_exprs[id]
-        elif not vdef.update_always and former_status.has_key(id):
-            # Preserve former value
-            value = former_status[id]
-        else:
-            if vdef.default_expr is not None:
-                expr = vdef.default_expr
-            else:
-                value = vdef.default_value
-        if expr is not None:
-            # Evaluate an expression.
-            if econtext is None:
-                # Lazily create the expression context.
-                if sci is None:
-                    sci = StateChangeInfo(
-                        ob, self, former_status, tdef,
-                        old_sdef, new_sdef, kwargs)
-                econtext = createExprContext(sci)
-            value = expr(econtext)
-        status[id] = value
-
-    # Do not proceed in case of failure of before script
-    if not before_script_success:
-        status[self.state_var] = old_state # Remain in state
-        tool = aq_parent(aq_inner(self))
-        tool.setStatusOf(self.id, ob, status)
-        sci = StateChangeInfo(
-            ob, self, status, tdef, old_sdef, new_sdef, kwargs)
-        sci.setWorkflowVariable(ob, workflow_id=self.id, error_message = before_script_error_message)
-        return new_sdef
-
-    # Update state.
-    status[self.state_var] = new_state
-    tool = aq_parent(aq_inner(self))
-    tool.setStatusOf(self.id, ob, status)
-
-    # Make sure that the error message is empty. # Why ?
-    #sci = StateChangeInfo(
-    #    ob, self, status, tdef, old_sdef, new_sdef, kwargs)
-    #sci.setWorkflowVariable(ob, error_message = '')
-
-    # Update role to permission assignments.
-    self.updateRoleMappingsFor(ob)
-
-    # Execute the "after" script.
-    if tdef is not None and tdef.after_script_name:
-        # Script can be either script or workflow method
-        #LOG('_executeTransition', 0, 'new_sdef.transitions = %s' % (repr(new_sdef.transitions)))
-        if tdef.after_script_name in filter(lambda k: self.transitions[k].trigger_type == TRIGGER_WORKFLOW_METHOD,
-                                                                                  new_sdef.transitions):
-          script = getattr(ob, convertToMixedCase(tdef.after_script_name))
-          script()
-        else:
-          script = self.scripts[tdef.after_script_name]
-          # Pass lots of info to the script in a single parameter.
-          sci = StateChangeInfo(
-              ob, self, status, tdef, old_sdef, new_sdef, kwargs)
-          script(sci)  # May throw an exception.
-
-    # Return the new state object.
-    if moved_exc is not None:
-        # Propagate the notification that the object has moved.
-        raise moved_exc
-    else:
-        return new_sdef
-
-
-DCWorkflowDefinition._executeTransition = DCWorkflowDefinition_executeTransition
-from Products.DCWorkflow.utils import modifyRolesForPermission
-
-# Patch updateRoleMappingsFor so that if 2 workflows define security, then we
-# should do an AND operation between each permission
-def updateRoleMappingsFor(self, ob):
-    '''
-    Changes the object permissions according to the current
-    state.
-    '''
-    changed = 0
-    sdef = self._getWorkflowStateOf(ob)
-
-    tool = aq_parent(aq_inner(self))
-    other_workflow_list = \
-       [x for x in tool.getWorkflowsFor(ob) if x.id != self.id and isinstance(x,DCWorkflowDefinition)]
-    other_data_list = []
-    for other_workflow in other_workflow_list:
-      other_sdef = other_workflow._getWorkflowStateOf(ob)
-      if other_sdef is not None and other_sdef.permission_roles is not None:
-        other_data_list.append((other_workflow,other_sdef))
-    # Be carefull, permissions_roles should not change
-    # from list to tuple or vice-versa. (in modifyRolesForPermission, 
-    # list means acquire roles, tuple means do not acquire)
-    if sdef is not None and self.permissions:
-        for p in self.permissions:
-            roles = []
-            refused_roles = []
-            role_type = 'list'
-            if sdef.permission_roles is not None:
-                roles = sdef.permission_roles.get(p, roles)
-                if type(roles) is type(()):
-                  role_type = 'tuple'
-                roles = list(roles)
-            # We will check that each role is activated
-            # in each DCWorkflow
-            for other_workflow,other_sdef in other_data_list:
-              if p in other_workflow.permissions:
-                for role in roles:
-                  other_roles = other_sdef.permission_roles.get(p, [])
-                  if type(other_roles) is type(()) :
-                    role_type = 'tuple'
-                  if role not in other_roles :
-                    refused_roles.append(role)
-            for role in refused_roles :
-              roles.remove(role)
-            if role_type=='tuple':
-              roles = tuple(roles)
-            if modifyRolesForPermission(ob, p, roles):
-                changed = 1
-    return changed
-
-DCWorkflowDefinition.updateRoleMappingsFor = updateRoleMappingsFor
-
-# This patch allows to use workflowmethod as an after_script
-# However, the right way of doing would be to have a combined state of TRIGGER_USER_ACTION and TRIGGER_WORKFLOW_METHOD
-# as well as workflow inheritance. This way, different user actions and dialogs can be specified easliy
-# For now, we split UI transitions and logics transitions so that UI can be different and logics the same
-from Products.DCWorkflow.Transitions import TransitionDefinition
-
-class ERP5TransitionDefinition (TransitionDefinition):
-
-    def getAvailableScriptIds(self):
-        return self.getWorkflow().scripts.keys() +  filter(
-          lambda k: self.getWorkflow().transitions[k].trigger_type == TRIGGER_WORKFLOW_METHOD, self.getWorkflow().transitions.keys())
-
-TransitionDefinition.getAvailableScriptIds = ERP5TransitionDefinition.getAvailableScriptIds
-
 ##############################################################################
-# Adding commit_prepare to the zodb transaction
-try:
-    from ZODB import Transaction
-    
-    hosed = Transaction.hosed
-    free_transaction = Transaction.free_transaction
-    jar_cmp = Transaction.jar_cmp
-    
-    def commit(self, subtransaction=None):
-        """Finalize the transaction."""
-        objects = self._objects
-        
-        subjars = []
-        if subtransaction:
-            if self._sub is None:
-                # Must store state across multiple subtransactions
-                # so that the final commit can commit all subjars.
-                self._sub = {}
-        else:
-            if self._sub is not None:
-                # This commit is for a top-level transaction that
-                # has previously committed subtransactions.  Do
-                # one last subtransaction commit to clear out the
-                # current objects, then commit all the subjars.
-                if objects:
-                    self.commit(1)
-                    objects = []
-                subjars = self._sub.values()
-                subjars.sort(jar_cmp)
-                self._sub = None
-                
-                # If there were any non-subtransaction-aware jars
-                # involved in earlier subtransaction commits, we need
-                # to add them to the list of jars to commit.
-                if self._non_st_objects is not None:
-                    objects.extend(self._non_st_objects)
-                    self._non_st_objects = None
-
-        if (objects or subjars) and hosed:
-            # Something really bad happened and we don't
-            # trust the system state.
-            raise POSException.TransactionError, hosed_msg
-
-        # It's important that:
-        #
-        # - Every object in self._objects is either committed or
-        #   aborted.
-        #
-        # - For each object that is committed we call tpc_begin on
-        #   it's jar at least once
-        #
-        # - For every jar for which we've called tpc_begin on, we
-        #   either call tpc_abort or tpc_finish. It is OK to call
-        #   these multiple times, as the storage is required to ignore
-        #   these calls if tpc_begin has not been called.
-        #
-        # - That we call tpc_begin() in a globally consistent order,
-        #   so that concurrent transactions involving multiple storages
-        #   do not deadlock.
-        try:
-            ncommitted = 0
-            # Do prepare until number of jars is stable - this could
-            # create infinite loop
-            jars_len = -1
-            jars = self._get_jars(objects, subtransaction)
-            objects_len = len(self._objects)
-            while len(jars) != jars_len:
-                jars_len = len(jars)
-                self._commit_prepare(jars, subjars, subtransaction)
-                if len(self._objects) != objects_len:
-                  objects.extend(self._objects[objects_len:])
-                  objects_len = len(self._objects)
-                jars = self._get_jars(objects, subtransaction)
-            try:
-                # If not subtransaction, then jars will be modified.
-                self._commit_begin(jars, subjars, subtransaction)
-                ncommitted += self._commit_objects(objects)
-                if not subtransaction:
-                    # Unless this is a really old jar that doesn't
-                    # implement tpc_vote(), it must raise an exception
-                    # if it can't commit the transaction.
-                    for jar in jars:
-                        try:
-                            vote = jar.tpc_vote
-                        except AttributeError:
-                            pass
-                        else:
-                            vote(self)
-
-                # Handle multiple jars separately.  If there are
-                # multiple jars and one fails during the finish, we
-                # mark this transaction manager as hosed.
-                if len(jars) == 1:
-                    self._finish_one(jars[0])
-                else:
-                    self._finish_many(jars)
-            except:
-                # Ugh, we got an got an error during commit, so we
-                # have to clean up.  First save the original exception
-                # in case the cleanup process causes another
-                # exception.
-                error = sys.exc_info()
-                try:
-                    self._commit_error(objects, ncommitted, jars, subjars)
-                except:
-                    LOG('ZODB', ERROR,
-                        "A storage error occured during transaction "
-                        "abort.  This shouldn't happen.",
-                        error=error)
-                raise error[0], error[1], error[2]
-        finally:
-            del objects[:] # clear registered
-            if not subtransaction and self._id is not None:
-                free_transaction()
-
-    def _commit_prepare(self, jars, subjars, subtransaction):
-        if subtransaction:
-            assert not subjars
-            for jar in jars:
-                try:
-                    jar.tpc_prepare(self, subtransaction)
-                except TypeError:
-                    # Assume that TypeError means that tpc_begin() only
-                    # takes one argument, and that the jar doesn't
-                    # support subtransactions.
-                    jar.tpc_prepare(self)
-                except AttributeError:
-                    # Assume that KeyError means that tpc_prepare
-                    # not available
-                    pass
-        else:
-            # Merge in all the jars used by one of the subtransactions.
-            
-            # When the top-level subtransaction commits, the tm must
-            # call commit_sub() for each jar involved in one of the
-            # subtransactions.  The commit_sub() method should call
-            # tpc_begin() on the storage object.
-            
-            # It must also call tpc_begin() on jars that were used in
-            # a subtransaction but don't support subtransactions.
-            
-            # These operations must be performed on the jars in order.
-            
-            # Modify jars inplace to include the subjars, too.
-            jars += subjars
-            jars.sort(jar_cmp)
-            # assume that subjars is small, so that it's cheaper to test
-            # whether jar in subjars than to make a dict and do has_key.
-            for jar in jars:
-                #if jar in subjars:
-                #  pass
-                #else:
-                try:
-                    jar.tpc_prepare(self)
-                except AttributeError:
-                    # Assume that KeyError means that tpc_prepare
-                    # not available
-                    pass
-
-    Transaction.Transaction.commit = commit
-    Transaction.Transaction._commit_prepare = _commit_prepare
-except ImportError:
-    pass
-
-
-##############################################################################
-# Make sure Interaction Workflows are called even if method not wrapped
-
-from Products.CMFCore.WorkflowTool import WorkflowTool
-
-def WorkflowTool_wrapWorkflowMethod(self, ob, method_id, func, args, kw):
-
-    """ To be invoked only by WorkflowCore.
-        Allows a workflow definition to wrap a WorkflowMethod.
-
-        By default, the workflow tool takes the first workflow wich
-        support the method_id. In ERP5, with Interaction Worfklows, we
-        may have many workflows wich can support a worfklow method,
-        that's why we need this patch
-
-        We should have 1 or 0 classic workflow (ie a DCWorkflow), and
-        0 or many Interaction workflows. We should take care that the
-        method will be called once
-    """
-    # Check workflow containing the workflow method
-    wf_list = []
-    wfs = self.getWorkflowsFor(ob)
-    if wfs:
-      for w in wfs:
-#         LOG('ERP5WorkflowTool.wrapWorkflowMethod, is wfMSupported', 0, 
-#              repr((w.isWorkflowMethodSupported(ob, method_id), 
-#                    w.getId(), ob, method_id )))
-        if (hasattr(w, 'isWorkflowMethodSupported')
-          and w.isWorkflowMethodSupported(ob, method_id)):
-          #wf = w
-          #break
-          wf_list.append(w)
-    else:
-      wfs = ()
-    # If no transition matched, simply call the method    
-    # And return
-    if len(wf_list)==0:
-      return apply(func, args, kw)
-    # Call notifyBefore on each workflow
-    for w in wfs:
-      w.notifyBefore(ob, method_id, args=args, kw=kw)
-    # Call the method on matching workflows
-    only_interaction_defined = 1
-    for w in wf_list:
-      if w.__class__.__name__ != 'InteractionWorkflowDefinition':
-        only_interaction_defined = 0
-        result = self._invokeWithNotification(
-            [], ob, method_id, w.wrapWorkflowMethod,
-            (ob, method_id, func, args, kw), {})
-    # If only interaction workflows are defined, we need to call the method
-    # manually
-    if only_interaction_defined:
-      result = apply(func, args, kw)
-    # Call notifySuccess on each workflow
-    for w in wfs:
-      w.notifySuccess(ob, method_id, result, args=args, kw=kw)
-    return result
-    
-WorkflowTool.wrapWorkflowMethod = WorkflowTool_wrapWorkflowMethod
-
-from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition
-
-def DCWorkflowDefinition_notifyBefore(self, ob, action, args=None, kw=None):
-    '''
-    Notifies this workflow of an action before it happens,
-    allowing veto by exception.  Unless an exception is thrown, either
-    a notifySuccess() or notifyException() can be expected later on.
-    The action usually corresponds to a method name.
-    '''
-    pass
-
-def DCWorkflowDefinition_notifySuccess(self, ob, action, result, args=None, kw=None):
-    '''
-    Notifies this workflow that an action has taken place.
-    '''
-    pass
-
-DCWorkflowDefinition.notifyBefore = DCWorkflowDefinition_notifyBefore
-DCWorkflowDefinition.notifySuccess = DCWorkflowDefinition_notifySuccess
-
-##############################################################################
-# Make sure the xml export will be ordered
-
-from Shared.DC.xml import ppml
-from base64 import encodestring
-from cStringIO import StringIO
-try:
-  from ZODB.serialize import referencesf
-except ImportError:
-  from ZODB.referencesf import referencesf
-from ZODB.ExportImport import TemporaryFile
-from pickle import Pickler, EMPTY_DICT, MARK, DICT
-from cPickle import loads, dumps
-from types import *
-
-# Jython has PyStringMap; it's a dict subclass with string keys
-try:
-    from org.python.core import PyStringMap
-except ImportError:
-    PyStringMap = None
-
-# Ordered pickles
-class OrderedPickler(Pickler):
-
-    dispatch = Pickler.dispatch.copy()
-
-    def save_dict(self, obj):
-        write = self.write
-
-        if self.bin:
-            write(EMPTY_DICT)
-        else:   # proto 0 -- can't use EMPTY_DICT
-            write(MARK + DICT)
-
-        self.memoize(obj)
-        item_list = obj.items() # New version by JPS for sorting
-        item_list.sort(lambda a, b: cmp(a[0], b[0])) # New version by JPS for sorting
-        self._batch_setitems(item_list.__iter__())
-
-    dispatch[DictionaryType] = save_dict
-    if not PyStringMap is None:
-        dispatch[PyStringMap] = save_dict
-
-def reorderPickle(jar, p):
-    from ZODB.ExportImport import Ghost, Unpickler, Pickler, StringIO, persistent_id
-
-    oids = {}
-    storage = jar._storage
-    new_oid = storage.new_oid
-    store = storage.store
-
-    def persistent_load(ooid,
-                        Ghost=Ghost,
-                        oids=oids, wrote_oid=oids.has_key,
-                        new_oid=storage.new_oid):
-
-        "Remap a persistent id to an existing ID and create a ghost for it."
-
-        if type(ooid) is TupleType: ooid, klass = ooid
-        else: klass=None
-
-        try:
-          Ghost=Ghost()
-          Ghost.oid=ooid
-        except TypeError:
-          Ghost=Ghost(ooid)
-        return Ghost
-
-
-    # Reorder pickle by doing I/O
-    pfile = StringIO(p)
-    unpickler=Unpickler(pfile)
-    unpickler.persistent_load=persistent_load
-
-    newp=StringIO()
-    pickler=OrderedPickler(newp,1)
-    pickler.persistent_id=persistent_id
-
-    pickler.dump(unpickler.load())
-    obj = unpickler.load()
-    pickler.dump(obj)
-    p=newp.getvalue()
-    return obj, p
-
-def XMLrecord(oid, plen, p, id_mapping):
-    # Proceed as usual
-    q=ppml.ToXMLUnpickler
-    f=StringIO(p)
-    u=q(f)
-    id=ppml.u64(oid)
-    id = id_mapping[id]
-    old_aka = encodestring(oid)[:-1]
-    aka=encodestring(ppml.p64(long(id)))[:-1]  # Rebuild oid based on mapped id
-    id_mapping.setConvertedAka(old_aka, aka)
-    u.idprefix=str(id)+'.'
-    p=u.load(id_mapping=id_mapping).__str__(4)
-    if f.tell() < plen:
-        p=p+u.load(id_mapping=id_mapping).__str__(4)
-    String='  <record id="%s" aka="%s">\n%s  </record>\n' % (id, aka, p)
-    return String
-
-from OFS import XMLExportImport
-XMLExportImport.XMLrecord = XMLrecord
-
-def exportXML(jar, oid, file=None):
-
-    if file is None: file=TemporaryFile()
-    elif type(file) is StringType: file=open(file,'w+b')
-    id_mapping = ppml.MinimalMapping()
-    #id_mapping = ppml.IdentityMapping()
-    write=file.write
-    write('<?xml version="1.0"?>\012<ZopeData>\012')
-    version=jar._version
-    ref=referencesf
-    oids=[oid]
-    done_oids={}
-    done=done_oids.has_key
-    load=jar._storage.load
-    original_oid = oid
-    reordered_pickle = []
-    # Build mapping for refs
-    while oids:
-        oid=oids[0]
-        del oids[0]
-        if done(oid): continue
-        done_oids[oid]=1
-        try: p, serial = load(oid, version)
-        except: pass # Ick, a broken reference
-        else:
-            o, p = reorderPickle(jar, p)
-            reordered_pickle.append((oid, o, p))
-            XMLrecord(oid,len(p),p, id_mapping)
-            # Determine new oids added to the list after reference calculation
-            old_oids = tuple(oids)
-            ref(p, oids)
-            new_oids = []
-            for i in oids:
-                if i not in old_oids: new_oids.append(i)
-            # Sort new oids based on id of object
-            new_oidict = {}
-            for oid in new_oids:
-                try:
-                    p, serial = load(oid, version)
-                    o, p = reorderPickle(jar, p)
-                    new_oidict[oid] = getattr(o, 'id', None)
-                except:
-                    new_oidict[oid] = None # Ick, a broken reference
-            new_oids.sort(lambda a,b: cmp(new_oidict[a], new_oidict[b]))
-            # Build new sorted oids
-            oids = list(old_oids) + new_oids
-    # Do real export
-    for (oid, o, p) in reordered_pickle:
-        write(XMLrecord(oid,len(p),p, id_mapping))
-    write('</ZopeData>\n')
-    return file
-
-XMLExportImport.exportXML = exportXML
-
-######################################################################################
-# Shared/DC/xml/ppml patch
-
-# Import everything right now, not after
-# or new patch will not work
-from Shared.DC.xml.ppml import *
-
-class Global:
-
-    def __init__(self, module, name, mapping):
-        self.module=module
-        self.name=name
-        self.mapping = mapping
-
-    def __str__(self, indent=0):
-        id = ''
-        if hasattr(self, 'id'):
-            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
-        name=string.lower(self.__class__.__name__)
-        return '%s<%s%s name="%s" module="%s"/>\n' % (
-            ' '*indent, name, id, self.name, self.module)
-
-from Shared.DC.xml import ppml
-ppml.Global = Global
-
-class Scalar:
-
-    def __init__(self, v, mapping):
-        self._v=v
-        self.mapping = mapping
-
-    def value(self): return self._v
-
-    def __str__(self, indent=0):
-        id = ''
-        name=string.lower(self.__class__.__name__)
-        result = '%s<%s%s>%s</%s>\n' % (
-            ' '*indent, name, id, self.value(), name)
-        if hasattr(self, 'id'):
-            # The value is Immutable - let us add it the the immutable mapping
-            # to reduce the number of unreadable references
-            self.mapping.setImmutable(self.id, Immutable(value = result))
-        return result
-
-ppml.Scalar = Scalar
-
-class Immutable:
-    def __init__(self, value):
-        self.value = value
-
-    def getValue(self):
-        return self.value
-
-class String(Scalar):
-    def __init__(self, v, mapping, encoding=''):
-        encoding, v = convert(v)
-        self.encoding=encoding
-        self._v=v
-        self.mapping = mapping
-    def __str__(self,indent=0,map_value=0):
-        v = self.value()
-        if map_value:
-            # This is used when strings represent references which need to be converted
-            if self.encoding == 'base64':
-                v = self.mapping.convertBase64(v)
-            else:
-                # Make sure we never produce this kind of xml output
-                raise
-        id = ''
-        encoding=''
-        if hasattr(self, 'encoding'):
-            if self.encoding != 'repr':
-                # JPS repr is default encoding
-                encoding=' encoding="%s"' % self.encoding
-        name=string.lower(self.__class__.__name__)
-        result = '%s<%s%s%s>%s</%s>\n' % (
-            ' '*indent, name, id, encoding, v, name)
-        if hasattr(self, 'id'):
-            # The value is Immutable - let us add it the the immutable mapping
-            # to reduce the number of unreadable references
-            self.mapping.setImmutable(self.id, Immutable(value = result))
-        return result
-
-ppml.String = String
-
-class Unicode(String):
-    def value(self):
-        return self._v.encode('utf-8')
-
-ppml.Unicode = Unicode
-
-class Wrapper:
-
-    def __init__(self, v, mapping):
-        self._v=v
-        self.mapping = mapping
-
-    def value(self): return self._v
-
-    def __str__(self, indent=0):
-        id = ''
-        if hasattr(self, 'id'):
-            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
-        name=string.lower(self.__class__.__name__)
-        v=self._v
-        i=' '*indent
-        if isinstance(v,Scalar):
-            return '%s<%s%s> %s </%s>\n' % (i, name, id, str(v)[:-1], name)
-        else:
-            v=v.__str__(indent+2)
-            return '%s<%s%s>\n%s%s</%s>\n' % (i, name, id, v, i, name)
-
-ppml.Wrapper = Wrapper
-
-class Collection:
-
-    def __init__(self, mapping):
-        self.mapping = mapping
-
-    def __str__(self, indent=0):
-        id = ''
-        if hasattr(self, 'id'):
-            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
-        name=string.lower(self.__class__.__name__)
-        i=' '*indent
-        if self:
-            return '%s<%s%s>\n%s%s</%s>\n' % (
-                i, name, id, self.value(indent+2), i, name)
-        else:
-            return '%s<%s%s/>\n' % (i, name, id)
-
-ppml.Collection = Collection
-
-class Dictionary(Collection):
-    def __init__(self, mapping):
-        self.mapping = mapping
-        self._d=[]
-    def __len__(self): return len(self._d)
-    def __setitem__(self, k, v): self._d.append((k,v))
-    def value(self, indent):
-        #self._d.sort(lambda a, b: cmp(a[0]._v, b[0]._v)) # Sort the sequence by key JPS Improvement
-        return string.join(
-            map(lambda i, ind=' '*indent, indent=indent+4:
-                '%s<item>\n'
-                '%s'
-                '%s'
-                '%s</item>\n'
-                %
-                (ind,
-                 Key(i[0], self.mapping).__str__(indent),
-                 Value(i[1], self.mapping).__str__(indent),
-                 ind),
-                self._d
-                ),
-            '')
-
-ppml.Dictionary = Dictionary
-
-class Sequence(Collection):
-
-    def __init__(self, mapping, v=None):
-        if not v: v=[]
-        self._subs=v
-        self.mapping = mapping
-
-    def __len__(self): return len(self._subs)
-
-    def append(self, v): self._subs.append(v)
-
-    # Bugfix JPS
-    def extend(self, v): self._subs.extend(v)
-
-    def value(self, indent):
-        return string.join(map(
-            lambda v, indent=indent: v.__str__(indent),
-            self._subs),'')
-
-ppml.Sequence = Sequence
-
-class Persistent(Wrapper):
-
-    def __str__(self, indent=0):
-        id = ''
-        if hasattr(self, 'id'):
-            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
-        name=string.lower(self.__class__.__name__)
-        v=self._v
-        i=' '*indent
-        if isinstance(v,String):
-            return '%s<%s%s> %s </%s>\n' % (i, name, id, v.__str__(map_value=1)[:-1], name)
-        elif isinstance(v,Scalar):
-            return '%s<%s%s> %s </%s>\n' % (i, name, id, str(v)[:-1], name)
-        else:
-            v=v.__str__(indent+2)
-            return '%s<%s%s>\n%s%s</%s>\n' % (i, name, id, v, i, name)
-
-ppml.Persistent = Persistent
-
-class Reference(Scalar):
-    def __init__(self, v, mapping):
-        self._v=v
-        self.mapping = mapping
-    def __str__(self, indent=0):
-        v=self._v
-        name=string.lower(self.__class__.__name__)
-        #LOG('Reference', 0, str(v))
-        if self.mapping.hasImmutable(v):
-          return self.mapping.getImmutable(v).getValue()
-        #LOG('noImmutable', 0, "%s mapped to %s" % (v, self.mapping[v]))
-        self.mapping.mark(v)
-        return '%s<%s id="%s"/>\n' % (' '*indent,name,self.mapping[v])
-
-ppml.Reference = Reference
-Get = Reference
-ppml.Get = Get
-
-class Object(Sequence):
-    def __init__(self, klass, args, mapping):
-        self._subs=[Klass(klass, mapping), args]
-        self.mapping = mapping
-
-    def __setstate__(self, v): self.append(State(v, self.mapping))
-
-ppml.Object = Object
-
-class IdentityMapping:
-
-    def __init__(self):
-      self.immutable = {}
-
-    def resetMapping(self):
-      pass
-
-    def __getitem__(self, id):
-      return id
-
-    def setConvertedAka(self, old, new):
-      pass
-
-    def convertBase64(self, s):
-      return s
-
-    def mark(self, v):
-      pass
-
-    def isMarked(self, v):
-      return 1
-
-    def setImmutable(self, k, v):
-      self.immutable[k] = v
-
-    def getImmutable(self, k):
-      return self.immutable[k]
-
-    def hasImmutable(self, k):
-      return self.immutable.has_key(k)
-
-
-ppml.IdentityMapping = IdentityMapping
-
-class MinimalMapping(IdentityMapping):
-    def __init__(self):
-      self.mapped_id = {}
-      self.mapped_core_id = {}
-      self.last_sub_id = {}
-      self.last_id = 1
-      self.converted_aka = {}
-      self.marked_reference = {}
-      self.immutable = {}
-
-    def resetMapping(self):
-      self.mapped_id = {}
-      self.mapped_core_id = {}
-      self.last_sub_id = {}
-      self.last_id = 1
-      self.converted_aka = {}
-      self.marked_reference = {}
-
-    def __getitem__(self, id):
-      id = str(id)
-      split_id = id.split('.')
-      if len(split_id) == 2:
-        (core_id, sub_id) = split_id
-      elif len(split_id) == 1:
-        core_id = split_id[0]
-        sub_id = None
-      else:
-        raise
-      if not self.mapped_id.has_key(core_id):
-        if sub_id is not None:
-          # Use existing id
-          self.mapped_id[core_id] = {}
-          self.mapped_core_id[core_id] = self.last_id - 1
-          self.last_sub_id[core_id] = 1
-        else:
-          # Create new core_id if not defined
-          self.mapped_id[core_id] = {}
-          self.mapped_core_id[core_id] = self.last_id
-          self.last_sub_id[core_id] = 1
-          self.last_id = self.last_id + 1
-      if sub_id is None:
-        return self.mapped_core_id[core_id]
-      if not self.mapped_id[core_id].has_key(sub_id):
-        # Create new sub_id if not defined
-        self.mapped_id[core_id][sub_id] = self.last_sub_id[core_id]
-        self.last_sub_id[core_id] = self.last_sub_id[core_id] + 1
-      return "%s.%s" % (self.mapped_core_id[core_id], self.mapped_id[core_id][sub_id])
-
-    def convertBase64(self, s):
-      return self.converted_aka.get(s, s)
-
-    def setConvertedAka(self, old, new):
-      self.converted_aka[old] =  new
-
-    def mark(self, v):
-      self.marked_reference[v] = 1
-
-    def isMarked(self, v):
-      return self.marked_reference.has_key(v)
-
-    def __str__(self, a):
-      return "Error here"
-
-ppml.MinimalMapping = MinimalMapping
-
-class List(Sequence): pass
-class Tuple(Sequence): pass
-
-class Klass(Wrapper): pass
-class State(Wrapper): pass
-class Pickle(Wrapper): pass
-
-class Int(Scalar): pass
-class Float(Scalar): pass
-
-class Key(Wrapper): pass
-class Value(Wrapper): pass
-
-class Long(Scalar):
-    def value(self):
-        result = str(self._v)
-        if result[-1:] == 'L':
-            return result[:-1]
-        return result
-
-class ToXMLUnpickler(Unpickler):
-
-    def load(self, id_mapping=None):
-      if id_mapping is None:
-        self.id_mapping = IdentityMapping()
-      else:
-        self.id_mapping = id_mapping
-      return Pickle(Unpickler.load(self), self.id_mapping)
-
-    dispatch = {}
-    dispatch.update(Unpickler.dispatch)
-
-    def persistent_load(self, v):
-        return Persistent(v, self.id_mapping)
-
-    def load_persid(self):
-        pid = self.readline()[:-1]
-        self.append(self.persistent_load(String(pid, self.id_mapping)))
-    dispatch[PERSID] = load_persid
-
-    def load_none(self):
-        self.append(none)
-    dispatch[NONE] = load_none
-
-    def load_int(self):
-        self.append(Int(string.atoi(self.readline()[:-1]), self.id_mapping))
-    dispatch[INT] = load_int
-
-    def load_binint(self):
-        self.append(Int(mloads('i' + self.read(4)), self.id_mapping))
-    dispatch[BININT] = load_binint
-
-    def load_binint1(self):
-        self.append(Int(mloads('i' + self.read(1) + '\000\000\000'), self.id_mapping))
-    dispatch[BININT1] = load_binint1
-
-    def load_binint2(self):
-        self.append(Int(mloads('i' + self.read(2) + '\000\000'), self.id_mapping))
-    dispatch[BININT2] = load_binint2
-
-    def load_long(self):
-        self.append(Long(string.atol(self.readline()[:-1], 0), self.id_mapping))
-    dispatch[LONG] = load_long
-
-    def load_float(self):
-        self.append(Float(string.atof(self.readline()[:-1]), self.id_mapping))
-    dispatch[FLOAT] = load_float
-
-    def load_binfloat(self, unpack=struct.unpack):
-        self.append(Float(unpack('>d', self.read(8))[0], self.id_mapping))
-    dispatch[BINFLOAT] = load_binfloat
-
-    def load_string(self):
-        self.append(String(eval(self.readline()[:-1],
-                                {'__builtins__': {}}), self.id_mapping)) # Let's be careful
-    dispatch[STRING] = load_string
-
-    def load_binstring(self):
-        len = mloads('i' + self.read(4))
-        self.append(String(self.read(len), self.id_mapping))
-    dispatch[BINSTRING] = load_binstring
-
-    def load_unicode(self):
-        self.append(Unicode(unicode(eval(self.readline()[:-1],
-                                         {'__builtins__': {}})), self.id_mapping)) # Let's be careful
-    dispatch[UNICODE] = load_unicode
-
-    def load_binunicode(self):
-        len = mloads('i' + self.read(4))
-        self.append(Unicode(unicode(self.read(len), 'utf-8'), self.id_mapping))
-    dispatch[BINUNICODE] = load_binunicode
-
-    def load_short_binstring(self):
-        len = mloads('i' + self.read(1) + '\000\000\000')
-        self.append(String(self.read(len), self.id_mapping))
-    dispatch[SHORT_BINSTRING] = load_short_binstring
-
-    def load_tuple(self):
-        k = self.marker()
-        #LOG('load_tuple, k',0,k)
-        #LOG('load_tuple, stack[k+1:]',0,self.stack[k+1:])
-        self.stack[k:] = [Tuple(self.id_mapping, v=self.stack[k+1:])]
-    dispatch[TUPLE] = load_tuple
-
-    def load_empty_tuple(self):
-        self.stack.append(Tuple(self.id_mapping))
-    dispatch[EMPTY_TUPLE] = load_empty_tuple
-
-    def load_empty_list(self):
-        self.stack.append(List(self.id_mapping))
-    dispatch[EMPTY_LIST] = load_empty_list
-
-    def load_empty_dictionary(self):
-        self.stack.append(Dictionary(self.id_mapping))
-    dispatch[EMPTY_DICT] = load_empty_dictionary
-
-    def load_list(self):
-        k = self.marker()
-        self.stack[k:] = [List(self.id_mapping, v=self.stack[k+1:])]
-    dispatch[LIST] = load_list
-
-    def load_dict(self):
-        k = self.marker()
-        d = Dictionary(self.id_mapping)
-        items = self.stack[k+1:]
-        for i in range(0, len(items), 2):
-            key = items[i]
-            value = items[i+1]
-            d[key] = value
-        self.stack[k:] = [d]
-    dispatch[DICT] = load_dict
-
-    def load_inst(self):
-        k = self.marker()
-        args = Tuple(self.id_mapping, v=self.stack[k+1:])
-        del self.stack[k:]
-        module = self.readline()[:-1]
-        name = self.readline()[:-1]
-        value=Object(Global(module, name, self.id_mapping), args, self.id_mapping)
-        self.append(value)
-    dispatch[INST] = load_inst
-
-    def load_obj(self):
-        stack = self.stack
-        k = self.marker()
-        klass = stack[k + 1]
-        del stack[k + 1]
-        args = Tuple(self.id_mapping, v=stack[k + 1:])
-        del stack[k:]
-        value=Object(klass,args, self.id_mapping)
-        self.append(value)
-    dispatch[OBJ] = load_obj
-
-    def load_global(self):
-        module = self.readline()[:-1]
-        name = self.readline()[:-1]
-        self.append(Global(module, name, self.id_mapping))
-    dispatch[GLOBAL] = load_global
-
-    def load_reduce(self):
-        stack = self.stack
-
-        callable = stack[-2]
-        arg_tup  = stack[-1]
-        del stack[-2:]
-
-        value=Object(callable, arg_tup, self.id_mapping)
-        self.append(value)
-    dispatch[REDUCE] = load_reduce
-
-    idprefix=''
-
-    def load_get(self):
-        self.append(Get(self.idprefix+self.readline()[:-1], self.id_mapping))
-    dispatch[GET] = load_get
-
-    def load_binget(self):
-        i = mloads('i' + self.read(1) + '\000\000\000')
-        self.append(Get(self.idprefix+`i`, self.id_mapping))
-    dispatch[BINGET] = load_binget
-
-    def load_long_binget(self):
-        i = mloads('i' + self.read(4))
-        self.append(Get(self.idprefix+`i`, self.id_mapping))
-    dispatch[LONG_BINGET] = load_long_binget
-
-    def load_put(self):
-        self.stack[-1].id=self.idprefix+self.readline()[:-1]
-    dispatch[PUT] = load_put
-
-    def load_binput(self):
-        i = mloads('i' + self.read(1) + '\000\000\000')
-        #LOG('load_binput', 0, 'self.stack = %r, self.idprefix+`i` = %r' % (self.stack, self.idprefix+`i`))
-        self.stack[-1].id=self.idprefix+`i`
-    dispatch[BINPUT] = load_binput
-
-    def load_long_binput(self):
-        i = mloads('i' + self.read(4))
-        self.stack[-1].id=self.idprefix+`i`
-    dispatch[LONG_BINPUT] = load_long_binput
-
-    class LogCall:
-      def __init__(self, func):
-        self.func = func
-
-      def __call__(self, context):
-        #LOG('LogCall', 0, 'self.stack = %r, func = %s' % (context.stack, self.func.__name__))
-        return self.func(context)
-
-    #for code in dispatch.keys():
-    #  dispatch[code] = LogCall(dispatch[code])
-
-ppml.ToXMLUnpickler = ToXMLUnpickler
-
-def end_string(self, tag, data):
-    v=data[2]
-    a=data[1]
-    encoding = a.get('encoding','repr') # JPS: repr is default encoding
-    if encoding != '': # Bugfix since (is was used on string)
-        v=unconvert(encoding,v)
-    if a.has_key('id'): self._pickleids[a['id']]=v
-    return v
-
-ppml.end_string = end_string
-
-def end_unicode(self, tag, data):
-    return unicode(end_string(self, tag, data), 'utf-8')
-
-ppml.end_unicode = end_unicode
-
-class xmlUnpickler(NoBlanks, xyap):
-    start_handlers={'pickle': start_pickle}
-    end_handlers={
-        'int':
-        lambda self,tag,data,atoi=string.atoi,name=name:
-            atoi(name(self, tag, data)),
-        'long':
-        lambda self,tag,data,atoi=string.atoi,name=name:
-            atoi(name(self, tag, data)),
-        'boolean':
-        lambda self,tag,data,atoi=string.atoi,name=name:
-            atoi(name(self, tag, data)),
-        'string': end_string ,
-        'unicode': end_unicode ,
-        'double':
-        lambda self,tag,data,atof=string.atof,name=name:
-            atof(name(self, tag, data)),
-        'float':
-        lambda self,tag,data,atof=string.atof,name=name:
-            atof(name(self, tag, data)),
-        'none': lambda self, tag, data: None,
-        'list': end_list,
-        'tuple': end_tuple,
-        'dictionary': end_dictionary,
-        'key': lambda self, tag, data: data[2],
-        'value': lambda self, tag, data: data[2],
-        'item': lambda self, tag, data: data[2:],
-        'reference': lambda self, tag, data: self._pickleids[data[1]['id']],
-        'state': lambda self, tag, data: data[2],
-        'klass': lambda self, tag, data: data[2],
-        }
-
-ppml.xmlUnpickler = xmlUnpickler
-
-def save_string(self, tag, data):
-    binary=self.binary
-    v=''
-    a=data[1]
-    if len(data)>2:
-        for x in data[2:]:
-            v=v+x
-    encoding=a.get('encoding','repr') # JPS: repr is default encoding
-    if encoding is not '':
-        v=unconvert(encoding,v)
-    put='p'
-    if binary:
-        l=len(v)
-        s=mdumps(l)[1:]
-        if (l<256):
-            v='U'+s[0]+v
-        else:
-            v='T'+s+v
-        put='q'
-    else: v="S'"+v+"'\012"
-    return save_put(self, v, a)
-
-ppml.save_string = save_string
-
-def save_unicode(self, tag, data):
-    binary=self.binary
-    v=''
-    a=data[1]
-    if len(data)>2:
-        for x in data[2:]:
-            v=v+x
-    encoding=a.get('encoding','repr') # JPS: repr is default encoding
-    if encoding is not '':
-        v=unconvert(encoding,v)
-    if binary:
-        l=len(v)
-        s=mdumps(l)[1:]
-        v=BINUNICODE+s+v
-    else: v=UNICODE+"'"+v+"'\012"
-    return save_put(self, v, a)
-
-ppml.save_unicode = save_unicode
-
-class xmlPickler(NoBlanks, xyap):
-    start_handlers={
-        'pickle': lambda self, tag, attrs: [tag, attrs],
-        }
-    end_handlers={
-        'pickle': lambda self, tag, data: data[2]+'.',
-        'none': lambda self, tag, data: 'N',
-        'int': save_int,
-        'long': lambda self, tag, data: 'L'+data[2]+'L\012',
-        'float': save_float,
-        'string': save_string,
-        'unicode': save_unicode,
-        'reference': save_reference,
-        'tuple': save_tuple,
-        'list': save_list,
-        'dictionary': save_dict,
-        'item': lambda self, tag, data, j=string.join: j(data[2:],''),
-        'value': lambda self, tag, data: data[2],
-        'key' : lambda self, tag, data: data[2],
-        'object': save_object,
-        'klass': lambda self, tag, data: data[2],
-        'state': lambda self, tag, data: data[2],
-        'global': save_global,
-        'persistent': save_persis,
-        }
-
-ppml.xmlPickler = xmlPickler
-
-class Tuple(Sequence): pass
-
-ppml.Tuple = Tuple
-
-######################################################################################
-# Expression patch
-
-from Products.CMFCore.Expression import Expression
-
-def Expression_hash(self):
-  return hash(self.text)
-
-Expression.__hash__ = Expression_hash
-
-######################################################################################
-# dtml-sqlvar patch to convert None to NULL
-
-from Shared.DC.ZRDB.sqlvar import SQLVar
-from Shared.DC.ZRDB import sqlvar
-from string import atoi,atof
-
-def SQLVar_render(self, md):
-    name=self.__name__
-    args=self.args
-    t=args['type']
-    try:
-        expr=self.expr
-        if type(expr) is type(''): v=md[expr]
-        else: v=expr(md)
-    except:
-        if args.has_key('optional') and args['optional']:
-            return 'null'
-        if type(expr) is not type(''):
-            raise
-        raise ValueError, 'Missing input variable, <em>%s</em>' % name
-
-    if t=='int':
-        try:
-            if type(v) is StringType:
-                if v[-1:]=='L':
-                    v=v[:-1]
-                atoi(v)
-            else: v=str(int(v))
-        except:
-            if not v and args.has_key('optional') and args['optional']:
-                return 'null'
-            raise ValueError, (
-                'Invalid integer value for <em>%s</em>' % name)
-    elif t=='float':
-        try:
-            if type(v) is StringType:
-                if v[-1:]=='L':
-                    v=v[:-1]
-                atof(v)
-            else: v=str(float(v))
-        except:
-            if not v and args.has_key('optional') and args['optional']:
-                return 'null'
-            raise ValueError, (
-                'Invalid floating-point value for <em>%s</em>' % name)
-    # Patched by yo
-    elif t=='datetime':
-        if v is None:
-            if args.has_key('optional') and args['optional']:
-                return 'null'
-            else:
-                raise ValueError, (
-                    'Invalid datetime value for <em>%s</em>: %r' % (name, v))
-
-        try:
-            if hasattr(v, 'ISO'):
-                v=v.ISO()
-            if hasattr(v, 'strftime'):
-                v=v.strftime('%Y-%m-%d %H:%M:%S')
-            else: v=str(v)
-        except:
-            if not v and args.has_key('optional') and args['optional']:
-                return 'null'
-            raise ValueError, (
-                'Invalid datetime value for <em>%s</em>: %r' % (name, v))
-
-        v=md.getitem('sql_quote__',0)(v)
-    # End of patch
-    else:
-        # Patched by yo
-        if v is None:
-            if args.has_key('optional') and args['optional']:
-                return 'null'
-            else:
-                raise ValueError, (
-                    'Invalid string value for <em>%s</em>' % name)
-        # End of patch
-
-        if not isinstance(v, (str, unicode)):
-            v=str(v)
-        if not v and t=='nb':
-            if args.has_key('optional') and args['optional']:
-                return 'null'
-            else:
-                raise ValueError, (
-                    'Invalid empty string value for <em>%s</em>' % name)
-
-        v=md.getitem('sql_quote__',0)(v)
-        #if find(v,"\'") >= 0: v=join(split(v,"\'"),"''")
-        #v="'%s'" % v
-
-    return v
-
-# Patched by yo. datetime is added.
-valid_type={'int':1, 'float':1, 'string':1, 'nb': 1, 'datetime' : 1}.has_key
-
-SQLVar.render = SQLVar_render
-SQLVar.__call__ = SQLVar_render
-sqlvar.valid_type = valid_type
-
-
-######################################################################################
-# CMFCatalogAware patch for accepting arbitrary parameters.
-
-from Products.CMFCore.CMFCatalogAware import CMFCatalogAware
-
-def reindexObject(self, idxs=[], *args, **kw):
-    """
-        Reindex the object in the portal catalog.
-        If idxs is present, only those indexes are reindexed.
-        The metadata is always updated.
-
-        Also update the modification date of the object,
-        unless specific indexes were requested.
-    """
-    if idxs == []:
-        # Update the modification date.
-        if hasattr(aq_base(self), 'notifyModified'):
-            self.notifyModified()
-    catalog = getToolByName(self, 'portal_catalog', None)
-    if catalog is not None:
-        catalog.reindexObject(self, idxs=idxs, *args, **kw)
-
-CMFCatalogAware.reindexObject = reindexObject
-
-
-##########################################
-# ZPublisher should drop requests without a good http referer
-
-from ZPublisher.BaseRequest import BaseRequest
-
-BaseRequest.erp5_old_traverse = BaseRequest.traverse
-
-import AccessControl
-
-def erp5_new_traverse(request, path, response=None, validated_hook=None):
-
-  if response is None: response=request.response
-  object = BaseRequest.erp5_old_traverse(request, path, response=response, validated_hook=validated_hook)
-  http_url = request.get('ACTUAL_URL', '').strip()
-  http_referer = request.get('HTTP_REFERER', '').strip()
-
-  security_manager = AccessControl.getSecurityManager()
-  user = security_manager.getUser()
-  user_roles = user.getRolesInContext(object)
-
-  # Manager can do anything
-  if 'Manager' in user_roles:
-    return object
-
-  # are we within a portal ?
-  try:
-    context = getattr(object, 'im_self', None)
-    if context is not None:
-      try:
-        portal_object = context.getPortalObject()
-      except AttributeError:
-        portal_object = object.getPortalObject()
-    else :
-      portal_object = object.getPortalObject()
-  except AttributeError:
-    pass
-  else:
-    if not getattr(portal_object, 'require_referer', 0):
-      return object
-    portal_url = portal_object.absolute_url()
-    if http_referer != '':
-      # if HTTP_REFERER is set, user can acces the object if referer is ok
-      if http_referer.startswith(portal_url):
-        return object
-      else:
-        LOG('HTTP_REFERER_CHECK : BAD REFERER !', 0, 'request : "%s", referer : "%s"' % (http_url, referer))
-        response.unauthorized()
-    else:
-      # no HTTP_REFERER, we only allow to reach portal_url
-      for i in ('/', '/index_html', '/login_form', '/view'):
-        if http_url.endswith(i):
-          http_url = http_url[:-len(i)]
-          break
-      if len(http_url) == 0 or not portal_url.startswith(http_url):
-        LOG('HTTP_REFERER_CHECK : NO REFERER !', 0, 'request : "%s"' % http_url)
-        response.unauthorized()
-
-  return object
-
-BaseRequest.traverse = erp5_new_traverse
-
-######################################################################################
-# AttrDict patch for more dict-like methods.
-try:
-    from App.ProductContext import AttrDict
-
-    def AttrDict_getitem(self, name):
-        try:
-            return getattr(self.ob, name)
-        except AttributeError:
-            raise KeyError
-
-    def AttrDict_has_key(self, name):
-        return hasattr(self.ob, name)
-
-    AttrDict.__getitem__ = AttrDict_getitem
-    AttrDict.has_key = AttrDict_has_key
-except ImportError:
-    pass
-
-    
-############################################################################
-# Locale roles acquisition patch for PAS
-
-from Acquisition import aq_inner, aq_parent
-
-try:
-  from PluggableAuthService.PropertiedUser import PropertiedUser
-except ImportError:
-  PropertiedUser = None
-  
-def getRolesInContext( self, object ):
-
-    """ Return the list of roles assigned to the user.
-
-    o Include local roles assigned in context of the passed-in object.
-
-    o Include *both* local roles assigned directly to us *and* those
-      assigned to our groups.
-
-    o Ripped off from AccessControl.User.BasicUser, which provides
-      no other extension mechanism. :(
-    """
-    user_id = self.getId()
-    # [ x.getId() for x in self.getGroups() ]
-    group_ids = self.getGroups()
-
-    principal_ids = list( group_ids )
-    principal_ids.insert( 0, user_id )
-
-    local ={} 
-    object = aq_inner( object )
-
-    while 1:
-
-        local_roles = getattr( object, '__ac_local_roles__', None )
-
-        if local_roles:
-
-            if callable( local_roles ):
-                local_roles = local_roles()
-
-            dict = local_roles or {}
-
-            for principal_id in principal_ids:
-                for role in dict.get( principal_id, [] ):
-                    local[ role ] = 1
-                    
-        # patch by Klaus for LocalRole blocking
-        if hasattr(object, '_getAcquireLocalRoles'):
-            if not object._getAcquireLocalRoles():
-                break
-
-        inner = aq_inner( object )
-        parent = aq_parent( inner )
-
-        if parent is not None:
-            object = parent
-            continue
-
-        new = getattr( object, 'im_self', None )
-
-        if new is not None:
-
-            object = aq_inner( new )
-            continue
-
-        break
-    
-    return list( self.getRoles() ) + local.keys()
-
-def allowed( self, object, object_roles=None ):
-
-    """ Check whether the user has access to object.
-
-    o The user must have one of the roles in object_roles to allow access.
-
-    o Include *both* local roles assigned directly to us *and* those
-      assigned to our groups.
-
-    o Ripped off from AccessControl.User.BasicUser, which provides
-      no other extension mechanism. :(
-    """
-    if object_roles is _what_not_even_god_should_do:
-        return 0
-
-    # Short-circuit the common case of anonymous access.
-    if object_roles is None or 'Anonymous' in object_roles:
-        return 1
-
-    # Provide short-cut access if object is protected by 'Authenticated'
-    # role and user is not nobody
-    if 'Authenticated' in object_roles and (
-        self.getUserName() != 'Anonymous User'):
-        return 1
-
-    # Check for ancient role data up front, convert if found.
-    # This should almost never happen, and should probably be
-    # deprecated at some point.
-    if 'Shared' in object_roles:
-        object_roles = self._shared_roles(object)
-        if object_roles is None or 'Anonymous' in object_roles:
-            return 1
-
-    # Check for a role match with the normal roles given to
-    # the user, then with local roles only if necessary. We
-    # want to avoid as much overhead as possible.
-    user_roles = self.getRoles()
-    for role in object_roles:
-        if role in user_roles:
-            if self._check_context(object):
-                return 1
-            return None
-
-    # Still have not found a match, so check local roles. We do
-    # this manually rather than call getRolesInContext so that
-    # we can incur only the overhead required to find a match.
-    inner_obj = aq_inner( object )
-    user_id = self.getId()
-    # [ x.getId() for x in self.getGroups() ]
-    group_ids = self.getGroups()
-
-    principal_ids = list( group_ids )
-    principal_ids.insert( 0, user_id )
-
-    while 1:
-
-        local_roles = getattr( inner_obj, '__ac_local_roles__', None )
-
-        if local_roles:
-
-            if callable( local_roles ):
-                local_roles = local_roles()
-
-            dict = local_roles or {}
-
-            for principal_id in principal_ids:
-
-                local_roles = dict.get( principal_id, [] )
-
-                for role in object_roles:
-
-                    if role in local_roles:
-
-                        if self._check_context( object ):
-                            return 1
-
-                        return 0
-                    
-        # patch by Klaus for LocalRole blocking
-        if hasattr(object, '_getAcquireLocalRoles'):
-            if not object._getAcquireLocalRoles():
-                break
-
-        inner = aq_inner( inner_obj )
-        parent = aq_parent( inner )
-
-        if parent is not None:
-            inner_obj = parent
-            continue
-
-        new = getattr( inner_obj, 'im_self', None )
-
-        if new is not None:
-            inner_obj = aq_inner( new )
-            continue
-
-        break
-
-    return None
-
-if PropertiedUser is not None:
-  PropertiedUser.getRolesInContext = getRolesInContext
-  PropertiedUser.allowed = allowed
-  
-############################################################################
-# State types patch for DCWorkflow
-from Products.DCWorkflow.States import StateDefinition
-
-_properties_form = DTMLFile('dtml/state_properties', globals())
-
-def getAvailableTypeList(self):
-  """This is a method specific to ERP5. This returns a list of state types, which are used for portal methods.
-  """
-  return ('current_inventory', 'reserved_inventory', 'future_inventory',
-          'draft_order', 'planned_order', )
-  
-def setProperties(self, title='', transitions=(), REQUEST=None, description='', type_list=()):
-    '''
-    '''
-    self.title = str(title)
-    self.description = str(description)
-    self.transitions = tuple(map(str, transitions))
-    # This is patched by yo.
-    self.type_list = tuple(type_list)
-    if REQUEST is not None:
-        return self.manage_properties(REQUEST, 'Properties changed.')
 
-StateDefinition._properties_form = _properties_form
-StateDefinition.getAvailableTypeList = getAvailableTypeList
-StateDefinition.setProperties = setProperties
-StateDefinition.type_list = ()
\ No newline at end of file
+# Load all monkey patches
+from Products.ERP5Type.patches import MembershipTool
+from Products.ERP5Type.patches import ObjectManager
+from Products.ERP5Type.patches.PropertyManager import ERP5PropertyManager
+from Products.ERP5Type.patches import DA
+from Products.ERP5Type.patches.DCWorkflow import ValidationFailed, ERP5TransitionDefinition
+from Products.ERP5Type.patches.BTreeFolder2 import ERP5BTreeFolder2Base
+from Products.ERP5Type.patches import Transaction
+from Products.ERP5Type.patches import WorkflowTool
+from Products.ERP5Type.patches import XMLExportImport
+from Products.ERP5Type.patches import ppml
+from Products.ERP5Type.patches import Expression
+from Products.ERP5Type.patches import sqlvar
+from Products.ERP5Type.patches import CMFCatalogAware
+from Products.ERP5Type.patches import BaseRequest
+from Products.ERP5Type.patches import ProductContext
+from Products.ERP5Type.patches import PropertiedUser
+from Products.ERP5Type.patches import States
diff --git a/product/ERP5Type/patches/BTreeFolder2.py b/product/ERP5Type/patches/BTreeFolder2.py
new file mode 100755
index 0000000000..3beb76091a
--- /dev/null
+++ b/product/ERP5Type/patches/BTreeFolder2.py
@@ -0,0 +1,98 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Stronger repair of BTreeFolder2
+import sys
+from Products.BTreeFolder2.BTreeFolder2 import BTreeFolder2Base
+from Acquisition import aq_base
+from BTrees.OOBTree import OOBTree
+from BTrees.OIBTree import OIBTree, union
+from BTrees.Length import Length
+from OFS.ObjectManager import BadRequestException, BeforeDeleteException
+from Products.ZCatalog.Lazy import LazyMap
+from zLOG import LOG, WARNING, ERROR, INFO
+
+class ERP5BTreeFolder2Base(BTreeFolder2Base):
+  """
+    This class is only for backward compatibility.
+  """
+  pass
+
+def _cleanup(self):
+    """Cleans up errors in the BTrees.
+
+    Certain ZODB bugs have caused BTrees to become slightly insane.
+    Fortunately, there is a way to clean up damaged BTrees that
+    always seems to work: make a new BTree containing the items()
+    of the old one.
+
+    Returns 1 if no damage was detected, or 0 if damage was
+    detected and fixed.
+    """
+    from BTrees.check import check
+    path = '/'.join(self.getPhysicalPath())
+    try:
+        check(self._tree)
+        for key in self._tree.keys():
+            if not self._tree.has_key(key):
+                raise AssertionError(
+                    "Missing value for key: %s" % repr(key))
+        check(self._mt_index)
+        for key, object in self._tree.items():
+            meta_type = getattr(object, 'meta_type', None)
+            if meta_type is not None:
+              if not self._mt_index.has_key(meta_type):
+                  raise AssertionError(
+                      "Missing meta_type index for key: %s" % repr(key))
+        for key, value in self._mt_index.items():
+            if (not self._mt_index.has_key(key)
+                or self._mt_index[key] is not value):
+                raise AssertionError(
+                    "Missing or incorrect meta_type index: %s"
+                    % repr(key))
+            check(value)
+            for k in value.keys():
+                if not value.has_key(k) or not self._tree.has_key(k):
+                    raise AssertionError(
+                        "Missing values for meta_type index: %s"
+                        % repr(key))
+        return 1
+    except (AssertionError, KeyError):
+        LOG('BTreeFolder2', WARNING,
+            'Detected damage to %s. Fixing now.' % path,
+            error=sys.exc_info())
+        try:
+            self._tree = OOBTree(self._tree)
+            mt_index = OOBTree()
+            for id, object in self._tree.items():
+              # Update the meta type index.
+              meta_type = getattr(object, 'meta_type', None)
+              if meta_type is not None:
+                  ids = mt_index.get(meta_type, None)
+                  if ids is None:
+                      ids = OIBTree()
+                      mt_index[meta_type] = ids
+                  ids[id] = 1
+            #LOG('Added All Object in BTree mti',0, map(lambda x:str(x), mt_index.keys()))
+            self._mt_index = OOBTree(mt_index)
+        except:
+            LOG('BTreeFolder2', ERROR, 'Failed to fix %s.' % path,
+                error=sys.exc_info())
+            raise
+        else:
+            LOG('BTreeFolder2', INFO, 'Fixed %s.' % path)
+        return 0
+
+BTreeFolder2Base._cleanup = _cleanup
diff --git a/product/ERP5Type/patches/BaseRequest.py b/product/ERP5Type/patches/BaseRequest.py
new file mode 100755
index 0000000000..f443e3b04f
--- /dev/null
+++ b/product/ERP5Type/patches/BaseRequest.py
@@ -0,0 +1,72 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# ZPublisher should drop requests without a good http referer
+
+from ZPublisher.BaseRequest import BaseRequest
+import AccessControl
+from zLOG import LOG
+BaseRequest.erp5_old_traverse = BaseRequest.traverse
+
+def erp5_new_traverse(request, path, response=None, validated_hook=None):
+
+  if response is None: response=request.response
+  object = BaseRequest.erp5_old_traverse(request, path, response=response, validated_hook=validated_hook)
+  http_url = request.get('ACTUAL_URL', '').strip()
+  http_referer = request.get('HTTP_REFERER', '').strip()
+
+  security_manager = AccessControl.getSecurityManager()
+  user = security_manager.getUser()
+  user_roles = user.getRolesInContext(object)
+
+  # Manager can do anything
+  if 'Manager' in user_roles:
+    return object
+
+  # are we within a portal ?
+  try:
+    context = getattr(object, 'im_self', None)
+    if context is not None:
+      try:
+        portal_object = context.getPortalObject()
+      except AttributeError:
+        portal_object = object.getPortalObject()
+    else :
+      portal_object = object.getPortalObject()
+  except AttributeError:
+    pass
+  else:
+    if not getattr(portal_object, 'require_referer', 0):
+      return object
+    portal_url = portal_object.absolute_url()
+    if http_referer != '':
+      # if HTTP_REFERER is set, user can access the object if referer is ok
+      if http_referer.startswith(portal_url):
+        return object
+      else:
+        LOG('HTTP_REFERER_CHECK : BAD REFERER !', 0, 'request : "%s", referer : "%s"' % (http_url, http_referer))
+        response.unauthorized()
+    else:
+      # no HTTP_REFERER, we only allow to reach portal_url
+      for i in ('/', '/index_html', '/login_form', '/view'):
+        if http_url.endswith(i):
+          http_url = http_url[:-len(i)]
+          break
+      if len(http_url) == 0 or not portal_url.startswith(http_url):
+        LOG('HTTP_REFERER_CHECK : NO REFERER !', 0, 'request : "%s"' % http_url)
+        response.unauthorized()
+
+  return object
+
+BaseRequest.traverse = erp5_new_traverse
diff --git a/product/ERP5Type/patches/CMFCatalogAware.py b/product/ERP5Type/patches/CMFCatalogAware.py
new file mode 100755
index 0000000000..1aec75ad89
--- /dev/null
+++ b/product/ERP5Type/patches/CMFCatalogAware.py
@@ -0,0 +1,38 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# CMFCatalogAware patch for accepting arbitrary parameters.
+
+from Products.CMFCore.CMFCatalogAware import CMFCatalogAware
+from Acquisition import aq_base
+from Products.CMFCore.utils import getToolByName
+
+def reindexObject(self, idxs=[], *args, **kw):
+    """
+        Reindex the object in the portal catalog.
+        If idxs is present, only those indexes are reindexed.
+        The metadata is always updated.
+
+        Also update the modification date of the object,
+        unless specific indexes were requested.
+    """
+    if idxs == []:
+        # Update the modification date.
+        if hasattr(aq_base(self), 'notifyModified'):
+            self.notifyModified()
+    catalog = getToolByName(self, 'portal_catalog', None)
+    if catalog is not None:
+        catalog.reindexObject(self, idxs=idxs, *args, **kw)
+
+CMFCatalogAware.reindexObject = reindexObject
diff --git a/product/ERP5Type/patches/DA.py b/product/ERP5Type/patches/DA.py
new file mode 100755
index 0000000000..56aec20051
--- /dev/null
+++ b/product/ERP5Type/patches/DA.py
@@ -0,0 +1,125 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# XML content of zsql methods
+import re
+try: from IOBTree import Bucket
+except: Bucket=lambda:{}
+from Shared.DC.ZRDB.Aqueduct import decodestring, parse
+from Shared.DC.ZRDB.DA import DA
+
+def DA_fromFile(self, filename):
+  """
+    Read the file and update self
+  """
+  f = file(filename)
+  s = f.read()
+  f.close()
+  self.fromText(s)
+
+def DA_fromText(self, text):
+  """
+    Read the string 'text' and update self
+  """
+  start = text.find('<dtml-comment>')
+  end = text.find('</dtml-comment>')
+  block = text[start+14:end]
+  parameters = {}
+  for line in block.split('\n'):
+    pair = line.split(':',1)
+    if len(pair)!=2:
+      continue
+    parameters[pair[0].strip().lower()]=pair[1].strip()
+  # check for required and optional parameters
+  max_rows = parameters.get('max_rows',1000)
+  max_cache = parameters.get('max_cache',100)
+  cache_time = parameters.get('cache_time',0)
+  class_name = parameters.get('class_name','')
+  class_file = parameters.get('class_file','')
+  title = parameters.get('title','')
+  connection_id = parameters.get('connection_id','')
+  arguments = parameters.get('arguments','')
+  start = text.rfind('<params>')
+  end = text.rfind('</params>')
+  arguments = text[start+8:end]
+  template = text[end+9:]
+  while template.find('\n')==0:
+    template=template.replace('\n','',1)
+  self.manage_edit(title=title, connection_id=connection_id,
+                  arguments=arguments, template=template)
+  self.manage_advanced(max_rows, max_cache, cache_time, class_name, class_file)
+
+def DA_manage_FTPget(self):
+    """Get source for FTP download"""
+    self.REQUEST.RESPONSE.setHeader('Content-Type', 'text/plain')
+    return """<dtml-comment>
+title:%s
+connection_id:%s
+max_rows:%s
+max_cache:%s
+cache_time:%s
+class_name:%s
+class_file:%s
+</dtml-comment>
+<params>%s</params>
+%s""" % (self.title, self.connection_id,
+         self.max_rows_, self.max_cache_, self.cache_time_,
+         self.class_name_, self.class_file_,
+         self.arguments_src, self.src)
+
+# This function doesn't take care about properties by default
+def DA_PUT(self, REQUEST, RESPONSE):
+    """Handle put requests"""
+    if RESPONSE is not None: self.dav__init(REQUEST, RESPONSE)
+    if RESPONSE is not None: self.dav__simpleifhandler(REQUEST, RESPONSE, refresh=1)
+    body = REQUEST.get('BODY', '')
+    m = re.match('\s*<dtml-comment>(.*?)</dtml-comment>\s*\n', body, re.I | re.S)
+    if m:
+        property_src = m.group(1)
+        parameters = {}
+        for line in property_src.split('\n'):
+          pair = line.split(':',1)
+          if len(pair)!=2:
+            continue
+          parameters[pair[0].strip().lower()]=pair[1].strip()
+        # check for required and optional parameters
+        max_rows = parameters.get('max_rows',1000)
+        max_cache = parameters.get('max_cache',100)
+        cache_time = parameters.get('cache_time',0)
+        class_name = parameters.get('class_name','')
+        class_file = parameters.get('class_file','')
+        title = parameters.get('title','')
+        connection_id = parameters.get('connection_id','')
+        self.manage_advanced(max_rows, max_cache, cache_time, class_name, class_file)
+        self.title = str(title)
+        self.connection_id = str(connection_id)
+        body = body[m.end():]
+    m = re.match('\s*<params>(.*)</params>\s*\n', body, re.I | re.S)
+    if m:
+        self.arguments_src = m.group(1)
+        self._arg=parse(self.arguments_src)
+        body = body[m.end():]
+    template = body
+    self.src = template
+    self.template=t=self.template_class(template)
+    t.cook()
+    self._v_cache={}, Bucket()
+    if RESPONSE is not None: RESPONSE.setStatus(204)
+    return RESPONSE
+
+
+DA.fromFile = DA_fromFile
+DA.fromText = DA_fromText
+DA.manage_FTPget = DA_manage_FTPget
+DA.PUT = DA_PUT
diff --git a/product/ERP5Type/patches/DCWorkflow.py b/product/ERP5Type/patches/DCWorkflow.py
new file mode 100755
index 0000000000..6a56d9f071
--- /dev/null
+++ b/product/ERP5Type/patches/DCWorkflow.py
@@ -0,0 +1,300 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Optimized rendering of global actions (cache)
+
+from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition, StateChangeInfo, ObjectMoved, createExprContext, aq_parent, aq_inner
+from Products.DCWorkflow import DCWorkflow
+from Products.DCWorkflow.Transitions import TRIGGER_WORKFLOW_METHOD, TransitionDefinition
+from AccessControl import getSecurityManager, ClassSecurityInfo, ModuleSecurityInfo
+from Products.CMFCore.utils import getToolByName
+from Products.CMFCore.WorkflowCore import WorkflowException
+from Products.CMFCore.utils import  _getAuthenticatedUser
+from DocumentTemplate.DT_Util import TemplateDict
+from DateTime import DateTime
+from Products.ERP5Type.Cache import CachingMethod
+from Products.ERP5Type.Utils import convertToMixedCase
+
+def DCWorkflowDefinition_listGlobalActions(self, info):
+    '''
+    Allows this workflow to
+    include actions to be displayed in the actions box.
+    Called on every request.
+    Returns the actions to be displayed to the user.
+    '''
+    def _listGlobalActions(user=None, id=None, portal_path=None):
+      if not self.worklists:
+          return None  # Optimization
+      sm = getSecurityManager()
+      portal = self._getPortalRoot()
+      res = []
+      fmt_data = None
+      # We want to display some actions depending on the current date
+      # So, we can now put this kind of expression : <= "%(now)s"
+      # May be this patch should be moved to listFilteredActions in the future
+      info.now = DateTime()
+      for id, qdef in self.worklists.items():
+          if qdef.actbox_name:
+              guard = qdef.guard
+              # Patch for ERP5 by JP Smets in order
+              # to implement worklists and search of local roles
+              searchres_len = 0
+              var_match_keys = qdef.getVarMatchKeys()
+              if var_match_keys:
+                  # Check the catalog for items in the worklist.
+                  catalog = getToolByName(self, 'portal_catalog')
+                  dict = {}
+                  for k in var_match_keys:
+                      v = qdef.getVarMatch(k)
+                      v_fmt = map(lambda x, info=info: x%info, v)
+                      dict[k] = v_fmt
+                  # Patch for ERP5 by JP Smets in order
+                  # to implement worklists and search of local roles
+                  if not (guard is None or guard.check(sm, self, portal)):
+                      dict['local_roles'] = guard.roles
+                  # Patch to use ZSQLCatalog and get high speed
+                  # LOG("PatchedDCWorkflowDefinition", 0, dict)
+                  searchres_len = int(apply(catalog.countResults, (), dict)[0][0])
+                  if searchres_len == 0:
+                      continue
+              if fmt_data is None:
+                  fmt_data = TemplateDict()
+                  fmt_data._push(info)
+              fmt_data._push({'count': searchres_len})
+              # Patch for ERP5 by JP Smets in order
+              # to implement worklists and search of local roles
+              if dict.has_key('local_roles'):
+                fmt_data._push({'local_roles': join(guard.roles,';')})
+              else:
+                fmt_data._push({'local_roles': ''})
+              res.append((id, {'name': qdef.actbox_name % fmt_data,
+                              'url': qdef.actbox_url % fmt_data,
+                              'worklist_id': id,
+                              'workflow_title': self.title,
+                              'workflow_id': self.id,
+                              'permissions': (),  # Predetermined.
+                              'category': qdef.actbox_category}))
+              fmt_data._pop()
+      res.sort()
+      return map((lambda (id, val): val), res)
+
+    # Return Cache
+    _listGlobalActions = CachingMethod(_listGlobalActions, id='listGlobalActions', cache_duration = 300)
+    user = str(_getAuthenticatedUser(self))
+    return _listGlobalActions(user=user, id=self.id, portal_path=self._getPortalRoot().getPhysicalPath())
+
+
+DCWorkflowDefinition.listGlobalActions = DCWorkflowDefinition_listGlobalActions
+
class ValidationFailed(Exception):
    """Transition can not be executed because data is not in consistent state"""

# Expose the exception on the DCWorkflow module so workflow scripts can
# raise it to abort a transition (see the _executeTransition patch).
DCWorkflow.ValidationFailed = ValidationFailed

# Allow restricted (through-the-web) code to import and catch it.
ModuleSecurityInfo('Products.DCWorkflow.DCWorkflow').declarePublic('ValidationFailed')
+
+
def DCWorkflowDefinition_executeTransition(self, ob, tdef=None, kwargs=None):
    '''
    Private method.
    Puts object in a new state.

    ERP5 changes over the stock DCWorkflow implementation:
    - the "before" script may abort the transition by raising
      ValidationFailed; the object then stays in its old state and the
      error message is stored via setWorkflowVariable.
    - the "after" script may be a workflow method (a transition of the
      destination state with trigger_type TRIGGER_WORKFLOW_METHOD),
      called as a method of the object itself.
    '''
    sci = None
    econtext = None
    moved_exc = None

    # Figure out the old and new states.
    old_sdef = self._getWorkflowStateOf(ob)
    old_state = old_sdef.getId()
    if tdef is None:
        # No transition definition: enter the initial state.
        new_state = self.initial_state
        former_status = {}
    else:
        new_state = tdef.new_state_id
        if not new_state:
            # Stay in same state.
            new_state = old_state
        former_status = self._getStatusOf(ob)
    new_sdef = self.states.get(new_state, None)
    if new_sdef is None:
        raise WorkflowException, (
            'Destination state undefined: ' + new_state)

    # Execute the "before" script.
    before_script_success = 1
    if tdef is not None and tdef.script_name:
        script = self.scripts[tdef.script_name]
        # Pass lots of info to the script in a single parameter.
        sci = StateChangeInfo(
            ob, self, former_status, tdef, old_sdef, new_sdef, kwargs)
        try:
            #LOG('_executeTransition', 0, "script = %s, sci = %s" % (repr(script), repr(sci)))
            script(sci)  # May throw an exception.
        except ValidationFailed, validation_exc:
            # ERP5: remember the failure and its message; the transition
            # is aborted further below, after variables are computed.
            before_script_success = 0
            before_script_error_message = str(validation_exc)
        except ObjectMoved, moved_exc:
            ob = moved_exc.getNewObject()
            # Re-raise after transition

    # Update variables.
    state_values = new_sdef.var_values
    if state_values is None: state_values = {}
    tdef_exprs = None
    if tdef is not None: tdef_exprs = tdef.var_exprs
    if tdef_exprs is None: tdef_exprs = {}
    status = {}
    for id, vdef in self.variables.items():
        if not vdef.for_status:
            continue
        expr = None
        # Precedence: state value, then transition expression, then the
        # former value (unless update_always), then the variable default.
        if state_values.has_key(id):
            value = state_values[id]
        elif tdef_exprs.has_key(id):
            expr = tdef_exprs[id]
        elif not vdef.update_always and former_status.has_key(id):
            # Preserve former value
            value = former_status[id]
        else:
            if vdef.default_expr is not None:
                expr = vdef.default_expr
            else:
                value = vdef.default_value
        if expr is not None:
            # Evaluate an expression.
            if econtext is None:
                # Lazily create the expression context.
                if sci is None:
                    sci = StateChangeInfo(
                        ob, self, former_status, tdef,
                        old_sdef, new_sdef, kwargs)
                econtext = createExprContext(sci)
            value = expr(econtext)
        status[id] = value

    # Do not proceed in case of failure of before script
    if not before_script_success:
        status[self.state_var] = old_state # Remain in state
        tool = aq_parent(aq_inner(self))
        tool.setStatusOf(self.id, ob, status)
        sci = StateChangeInfo(
            ob, self, status, tdef, old_sdef, new_sdef, kwargs)
        # Record the validation error message in the workflow state so
        # the UI can display why the transition was refused.
        sci.setWorkflowVariable(ob, workflow_id=self.id, error_message = before_script_error_message)
        return new_sdef

    # Update state.
    status[self.state_var] = new_state
    tool = aq_parent(aq_inner(self))
    tool.setStatusOf(self.id, ob, status)

    # Make sure that the error message is empty. # Why ?
    #sci = StateChangeInfo(
    #    ob, self, status, tdef, old_sdef, new_sdef, kwargs)
    #sci.setWorkflowVariable(ob, error_message = '')

    # Update role to permission assignments.
    self.updateRoleMappingsFor(ob)

    # Execute the "after" script.
    if tdef is not None and tdef.after_script_name:
        # Script can be either script or workflow method
        #LOG('_executeTransition', 0, 'new_sdef.transitions = %s' % (repr(new_sdef.transitions)))
        if tdef.after_script_name in filter(lambda k: self.transitions[k].trigger_type == TRIGGER_WORKFLOW_METHOD,
                                                                                  new_sdef.transitions):
          # ERP5: the after script is a workflow method; call it on the
          # object under its mixed-case method name.
          script = getattr(ob, convertToMixedCase(tdef.after_script_name))
          script()
        else:
          script = self.scripts[tdef.after_script_name]
          # Pass lots of info to the script in a single parameter.
          sci = StateChangeInfo(
              ob, self, status, tdef, old_sdef, new_sdef, kwargs)
          script(sci)  # May throw an exception.

    # Return the new state object.
    if moved_exc is not None:
        # Propagate the notification that the object has moved.
        raise moved_exc
    else:
        return new_sdef


DCWorkflowDefinition._executeTransition = DCWorkflowDefinition_executeTransition
+from Products.DCWorkflow.utils import modifyRolesForPermission
+
# Patch updateRoleMappingsFor so that if 2 workflows define security, then we
# should do an AND operation between each permission
def updateRoleMappingsFor(self, ob):
    '''
    Changes the object permissions according to the current
    state.  A role is granted for a permission only if every other
    DCWorkflow managing the same permission also grants it (AND
    semantics).  Returns 1 if any permission changed, 0 otherwise.
    '''
    changed = 0
    sdef = self._getWorkflowStateOf(ob)

    tool = aq_parent(aq_inner(self))
    other_workflow_list = \
       [x for x in tool.getWorkflowsFor(ob) if x.id != self.id and isinstance(x,DCWorkflowDefinition)]
    other_data_list = []
    for other_workflow in other_workflow_list:
      other_sdef = other_workflow._getWorkflowStateOf(ob)
      if other_sdef is not None and other_sdef.permission_roles is not None:
        other_data_list.append((other_workflow,other_sdef))
    # Be careful, permissions_roles should not change
    # from list to tuple or vice-versa. (in modifyRolesForPermission,
    # list means acquire roles, tuple means do not acquire)
    if sdef is not None and self.permissions:
        for p in self.permissions:
            roles = []
            refused_roles = []
            role_type = 'list'
            if sdef.permission_roles is not None:
                roles = sdef.permission_roles.get(p, roles)
                if type(roles) is type(()):
                  role_type = 'tuple'
                roles = list(roles)
            # We will check that each role is activated
            # in each DCWorkflow
            for other_workflow,other_sdef in other_data_list:
              if p in other_workflow.permissions:
                for role in roles:
                  other_roles = other_sdef.permission_roles.get(p, [])
                  if type(other_roles) is type(()) :
                    role_type = 'tuple'
                  # Bug fix: do not append the same role twice when
                  # several workflows refuse it -- the second
                  # roles.remove(role) below raised ValueError.
                  if role not in other_roles and role not in refused_roles:
                    refused_roles.append(role)
            for role in refused_roles :
              roles.remove(role)
            if role_type=='tuple':
              roles = tuple(roles)
            if modifyRolesForPermission(ob, p, roles):
                changed = 1
    return changed

DCWorkflowDefinition.updateRoleMappingsFor = updateRoleMappingsFor
+
# This patch allows to use workflowmethod as an after_script
# However, the right way of doing would be to have a combined state of TRIGGER_USER_ACTION and TRIGGER_WORKFLOW_METHOD
# as well as workflow inheritance. This way, different user actions and dialogs can be specified easily
# For now, we split UI transitions and logics transitions so that UI can be different and logics the same

class ERP5TransitionDefinition (TransitionDefinition):
  """
    This class is only for backward compatibility.
  """
  pass

def getAvailableScriptIds(self):
  """Return the ids usable as (after) scripts: the workflow's real
  script ids plus the ids of transitions triggered as workflow methods.
  """
  # Hoist the repeated getWorkflow()/transitions lookups.
  workflow = self.getWorkflow()
  transitions = workflow.transitions
  return workflow.scripts.keys() + \
      [k for k in transitions.keys()
       if transitions[k].trigger_type == TRIGGER_WORKFLOW_METHOD]

# Bug fix: getAvailableScriptIds is a module-level function, not an
# attribute of ERP5TransitionDefinition (whose body is just 'pass'), so
# referencing it through the class raised AttributeError at import time.
TransitionDefinition.getAvailableScriptIds = getAvailableScriptIds
diff --git a/product/ERP5Type/patches/Expression.py b/product/ERP5Type/patches/Expression.py
new file mode 100755
index 0000000000..ec2b84e528
--- /dev/null
+++ b/product/ERP5Type/patches/Expression.py
@@ -0,0 +1,22 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Expression patch
+
+from Products.CMFCore.Expression import Expression
+
def Expression_hash(self):
  """Hash an Expression by its TALES source text so Expression
  instances can be used as dictionary keys or cached."""
  source_text = self.text
  return hash(source_text)
+
+Expression.__hash__ = Expression_hash
diff --git a/product/ERP5Type/patches/MembershipTool.py b/product/ERP5Type/patches/MembershipTool.py
new file mode 100755
index 0000000000..392b3c6bce
--- /dev/null
+++ b/product/ERP5Type/patches/MembershipTool.py
@@ -0,0 +1,17 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
# Folder naming: the members folder should be named as a singular in small caps
from Products.CMFDefault.MembershipTool import MembershipTool
MembershipTool.membersfolder_id = 'member'
diff --git a/product/ERP5Type/patches/ObjectManager.py b/product/ERP5Type/patches/ObjectManager.py
new file mode 100755
index 0000000000..c13cbdd572
--- /dev/null
+++ b/product/ERP5Type/patches/ObjectManager.py
@@ -0,0 +1,40 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Import: add rename feature
+from OFS.ObjectManager import ObjectManager, customImporters
+
def ObjectManager_importObjectFromFile(self, filepath, verify=1, set_owner=1, id=None):
    """Import a ZODB export file and add the object to self.

    ERP5 addition over the stock implementation: the optional 'id'
    argument allows renaming the object on import instead of keeping
    the exported object's own id.
    """
    #LOG('_importObjectFromFile, filepath',0,filepath)
    # locate a valid connection
    connection=self._p_jar
    obj=self

    # Newly created objects may not have a ZODB connection yet; walk up
    # the acquisition chain until we find one.
    while connection is None:
        obj=obj.aq_parent
        connection=obj._p_jar
    ob=connection.importFile(
        filepath, customImporters=customImporters)
    if verify: self._verifyObjectPaste(ob, validate_src=0)
    if id is None:
      id=ob.id
    # 'id' may be a method (computed id); call it to get the value.
    if hasattr(id, 'im_func'): id=id()
    self._setObject(id, ob, set_owner=set_owner)

    # try to make ownership implicit if possible in the context
    # that the object was imported into.
    ob=self._getOb(id)
    ob.manage_changeOwnershipType(explicit=0)

ObjectManager._importObjectFromFile=ObjectManager_importObjectFromFile
diff --git a/product/ERP5Type/patches/ProductContext.py b/product/ERP5Type/patches/ProductContext.py
new file mode 100755
index 0000000000..7956d89990
--- /dev/null
+++ b/product/ERP5Type/patches/ProductContext.py
@@ -0,0 +1,31 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
# AttrDict patch for more dict-like methods.
try:
    from App.ProductContext import AttrDict

    def AttrDict_getitem(self, name):
        """Map item access on the AttrDict to attribute access on the
        wrapped object, translating AttributeError to KeyError."""
        try:
            return getattr(self.ob, name)
        except AttributeError:
            # Include the missing key in the exception so callers can
            # see what was looked up (a bare 'raise KeyError' hid it).
            raise KeyError(name)

    def AttrDict_has_key(self, name):
        """Dict-style containment test backed by hasattr()."""
        return hasattr(self.ob, name)

    AttrDict.__getitem__ = AttrDict_getitem
    AttrDict.has_key = AttrDict_has_key
except ImportError:
    pass
diff --git a/product/ERP5Type/patches/PropertiedUser.py b/product/ERP5Type/patches/PropertiedUser.py
new file mode 100755
index 0000000000..8d9749e35f
--- /dev/null
+++ b/product/ERP5Type/patches/PropertiedUser.py
@@ -0,0 +1,188 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights
+# Reserved.
+# Copyright (c) 2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this
+# distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+
+# Locale roles acquisition patch for PAS
+
+from Acquisition import aq_inner, aq_parent
+
+try:
+  from PluggableAuthService.PropertiedUser import PropertiedUser
+except ImportError:
+  PropertiedUser = None
+  
def getRolesInContext( self, object ):

    """ Return the list of roles assigned to the user.

    o Include local roles assigned in context of the passed-in object.

    o Include *both* local roles assigned directly to us *and* those
      assigned to our groups.

    o Ripped off from AccessControl.User.BasicUser, which provides
      no other extension mechanism. :(
    """
    user_id = self.getId()
    # [ x.getId() for x in self.getGroups() ]
    group_ids = self.getGroups()

    # Candidate principals: the user itself first, then its groups.
    principal_ids = list( group_ids )
    principal_ids.insert( 0, user_id )

    # 'local' is used as a set of role names (role -> 1).
    local ={} 
    object = aq_inner( object )

    # Walk up the acquisition chain, accumulating local roles.
    while 1:

        local_roles = getattr( object, '__ac_local_roles__', None )

        if local_roles:

            # __ac_local_roles__ may be a callable returning the mapping.
            if callable( local_roles ):
                local_roles = local_roles()

            dict = local_roles or {}

            for principal_id in principal_ids:
                for role in dict.get( principal_id, [] ):
                    local[ role ] = 1
                    
        # patch by Klaus for LocalRole blocking
        # Stop acquiring local roles when this object blocks acquisition.
        if hasattr(object, '_getAcquireLocalRoles'):
            if not object._getAcquireLocalRoles():
                break

        inner = aq_inner( object )
        parent = aq_parent( inner )

        if parent is not None:
            object = parent
            continue

        # At the top of the chain: if this is a bound method wrapper,
        # continue the walk from the object it is bound to.
        new = getattr( object, 'im_self', None )

        if new is not None:

            object = aq_inner( new )
            continue

        break
    
    return list( self.getRoles() ) + local.keys()
+
def allowed( self, object, object_roles=None ):

    """ Check whether the user has access to object.

    o The user must have one of the roles in object_roles to allow access.

    o Include *both* local roles assigned directly to us *and* those
      assigned to our groups.

    o Ripped off from AccessControl.User.BasicUser, which provides
      no other extension mechanism. :(
    """
    # Bug fix: this sentinel was referenced without ever being imported,
    # so the first call raised NameError.  Import it lazily here to keep
    # the module importable even without AccessControl on the path.
    from AccessControl.PermissionRole import _what_not_even_god_should_do

    if object_roles is _what_not_even_god_should_do:
        return 0

    # Short-circuit the common case of anonymous access.
    if object_roles is None or 'Anonymous' in object_roles:
        return 1

    # Provide short-cut access if object is protected by 'Authenticated'
    # role and user is not nobody
    if 'Authenticated' in object_roles and (
        self.getUserName() != 'Anonymous User'):
        return 1

    # Check for ancient role data up front, convert if found.
    # This should almost never happen, and should probably be
    # deprecated at some point.
    if 'Shared' in object_roles:
        object_roles = self._shared_roles(object)
        if object_roles is None or 'Anonymous' in object_roles:
            return 1

    # Check for a role match with the normal roles given to
    # the user, then with local roles only if necessary. We
    # want to avoid as much overhead as possible.
    user_roles = self.getRoles()
    for role in object_roles:
        if role in user_roles:
            if self._check_context(object):
                return 1
            return None

    # Still have not found a match, so check local roles. We do
    # this manually rather than call getRolesInContext so that
    # we can incur only the overhead required to find a match.
    inner_obj = aq_inner( object )
    user_id = self.getId()
    # [ x.getId() for x in self.getGroups() ]
    group_ids = self.getGroups()

    principal_ids = list( group_ids )
    principal_ids.insert( 0, user_id )

    while 1:

        local_roles = getattr( inner_obj, '__ac_local_roles__', None )

        if local_roles:

            if callable( local_roles ):
                local_roles = local_roles()

            dict = local_roles or {}

            for principal_id in principal_ids:

                local_roles = dict.get( principal_id, [] )

                for role in object_roles:

                    if role in local_roles:

                        if self._check_context( object ):
                            return 1

                        return 0

        # patch by Klaus for LocalRole blocking
        # Bug fix: test the object currently being walked (inner_obj),
        # not the original 'object', which never changes inside this
        # loop -- the old code re-tested the starting object on every
        # iteration, so blocking declared on an ancestor was ignored
        # (compare with getRolesInContext above).
        if hasattr(inner_obj, '_getAcquireLocalRoles'):
            if not inner_obj._getAcquireLocalRoles():
                break

        inner = aq_inner( inner_obj )
        parent = aq_parent( inner )

        if parent is not None:
            inner_obj = parent
            continue

        new = getattr( inner_obj, 'im_self', None )

        if new is not None:
            inner_obj = aq_inner( new )
            continue

        break

    return None
+
# Install the patches only when PluggableAuthService is available.
if PropertiedUser is not None:
  PropertiedUser.getRolesInContext = getRolesInContext
  PropertiedUser.allowed = allowed
diff --git a/product/ERP5Type/patches/PropertyManager.py b/product/ERP5Type/patches/PropertyManager.py
new file mode 100755
index 0000000000..6ac0475960
--- /dev/null
+++ b/product/ERP5Type/patches/PropertyManager.py
@@ -0,0 +1,175 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Properties
+from OFS.PropertyManager import PropertyManager, type_converters
+from OFS.PropertyManager import escape
+from Globals import DTMLFile
+from Products.ERP5Type.Utils import createExpressionContext
+from Products.ERP5Type.ERP5Type import ERP5TypeInformation
+from Products.CMFCore.Expression import Expression
+from Products.ERP5Type import _dtmldir
+
class ERP5PropertyManager(PropertyManager):
  """
    This class is only for backward compatibility.
  """
  pass

# Replacement properties management form, rendered from the ERP5Type
# DTML directory with the extensible-schema flag enabled.
PropertyManager_manage_propertiesForm=DTMLFile('properties',
                                               _dtmldir,
                                               property_extensible_schema__=1)
+
+
def PropertyManager_updateProperty(self, id, value):
    # Update the value of an existing property. If value
    # is a string, an attempt will be made to convert
    # the value to the type of the existing property.
    self._wrapperCheck(value)
    # ERP5 RAD content may create properties on the fly, so the
    # existence check only applies to non-RAD objects.
    if not hasattr(self, 'isRADContent'):
      if not self.hasProperty(id):
          # NOTE(review): Zope 2 string exception, kept as-is for
          # compatibility with Zope's HTTP error mapping.
          raise 'Bad Request', 'The property %s does not exist' % escape(id)
    if type(value)==type(''):
        # Properties without a declared type are converted as strings.
        proptype=self.getPropertyType(id) or 'string'
        if type_converters.has_key(proptype):
            value=type_converters[proptype](value)
    #LOG('_updateProperty', 0, 'self = %r, id = %r, value = %r' % (self, id, value))
    self._setPropValue(id, value)
+
def PropertyManager_hasProperty(self, id):
    """Return 1 if this object has a property named 'id', 0 otherwise."""
    for existing_id in self.propertyIds():
        if existing_id == id:
            return 1
    return 0
+
def PropertyManager_getProperty(self, id, d=None, evaluate=1):
    """Get the property 'id', returning the optional second argument
    (or None) when no such property is found.  Properties of type
    'tales' are evaluated as TALES expressions unless 'evaluate' is
    false."""
    prop_type = self.getPropertyType(id)
    if evaluate and prop_type == 'tales':
        # Evaluate the stored TALES source against this object.
        expression = Expression(getattr(self, id))
        econtext = createExpressionContext(self)
        return expression(econtext)
    if prop_type:
        return getattr(self, id)
    return d
+
def PropertyManager_getPropertyType(self, id):
    """Return the type of property 'id', or None if no such property
    exists.  Entries without an explicit type default to 'string'."""
    for metadata in self._propertyMap():
        if metadata['id'] == id:
            return metadata.get('type', 'string')
    return None
+
def PropertyManager_setProperty(self, id, value, type=None):
    """Set a new property with the given id, value and type.

    For 'selection' and 'multiple selection' properties the value
    argument names the select variable of the property.  When type is
    None, a default type is derived from the value itself.
    """
    if type is None:
      # Derive a default type from the value.  Bug fix: the previous
      # code called type(value), but the builtin is shadowed by the
      # 'type' parameter (which is None here), so it always raised
      # TypeError; it also split the type object instead of the value
      # when checking for multi-line strings.
      if isinstance(value, (list, tuple)):
        type = 'lines'
      elif isinstance(value, float):
        type = 'float'
      elif isinstance(value, int):
        type = 'int'
      elif value.__class__.__name__ == 'long':
        # Python 2 long integers (checked by class name so this code
        # also parses under Python 3, which has no 'long' builtin).
        type = 'long'
      elif isinstance(value, str) and len(value.split('\n')) > 1:
        type = 'text'
      else:
        type = 'string'

    self._wrapperCheck(value)
    if not self.valid_property_id(id):
        # Was a Zope 2 string exception ('Bad Request'); raise a real
        # exception class instead.
        raise ValueError('Bad Request: Invalid or duplicate property id')

    if type in ('selection', 'multiple selection'):
        if not hasattr(self, value):
            raise ValueError('Bad Request: No select variable %s' % value)
        self._local_properties=getattr(self, '_local_properties', ()) + (
            {'id':id, 'type':type, 'select_variable':value},)
        if type=='selection':
            self._setPropValue(id, '')
        else:
            self._setPropValue(id, [])
    else:
        self._local_properties=getattr(self, '_local_properties', ())+({'id':id,'type':type},)
        self._setPropValue(id, value)
+
def PropertyManager_delProperty(self, id):
    # Delete the property 'id': remove both its value and its entry in
    # the local property metadata.
    if not self.hasProperty(id):
        raise ValueError, 'The property %s does not exist' % escape(id)
    self._delPropValue(id)
    # Rebuild _local_properties without the deleted entry.
    self._local_properties=tuple(filter(lambda i, n=id: i['id'] != n,
                                  getattr(self, '_local_properties', ())))
+
def PropertyManager_propertyIds(self):
    """Return a list of property ids, local properties included."""
    return [prop['id'] for prop in self._propertyMap()]
+
def PropertyManager_propertyValues(self):
    """Return the current value of every declared property."""
    return [getattr(self, prop['id']) for prop in self._propertyMap()]
+
def PropertyManager_propertyItems(self):
    """Return (id, value) pairs for every declared property."""
    return [(prop['id'], getattr(self, prop['id']))
            for prop in self._propertyMap()]
+
def PropertyManager_propertyMap(self):
    """Return a tuple of metadata mappings: the class-level properties
    followed by any instance-local properties."""
    class_props = list(self._properties)
    local_props = list(getattr(self, '_local_properties', ()))
    return tuple(class_props + local_props)
+
def PropertyManager_propdict(self):
    """Return a mapping from property id to its metadata entry."""
    result = {}
    for meta in self._propertyMap():
        result[meta['id']] = meta
    return result
+
def PropertyManager_manage_addProperty(self, id, value, type, REQUEST=None):
    """Add a new property via the web. Sets a new property with
    the given id, type, and value."""
    # Convert the raw form value to the target type when a converter is
    # registered for it.
    if type_converters.has_key(type):
        value=type_converters[type](value)
    #LOG('manage_addProperty', 0, 'id = %r, value = %r, type = %r, REQUEST = %r' % (id, value, type, REQUEST))
    # Strip the id to avoid accidental leading/trailing whitespace from
    # the form input.
    self._setProperty(id.strip(), value, type)
    if REQUEST is not None:
        return self.manage_propertiesForm(self, REQUEST)
+
# Install the patched property API on PropertyManager.
PropertyManager.manage_addProperty = PropertyManager_manage_addProperty
PropertyManager.manage_propertiesForm = PropertyManager_manage_propertiesForm
PropertyManager._updateProperty = PropertyManager_updateProperty
PropertyManager.getPropertyType = PropertyManager_getPropertyType
PropertyManager._setProperty = PropertyManager_setProperty
PropertyManager._delProperty = PropertyManager_delProperty
PropertyManager.propertyIds = PropertyManager_propertyIds
PropertyManager.propertyValues = PropertyManager_propertyValues
PropertyManager.propertyItems = PropertyManager_propertyItems
PropertyManager._propertyMap = PropertyManager_propertyMap
PropertyManager.propdict = PropertyManager_propdict
PropertyManager.hasProperty = PropertyManager_hasProperty
PropertyManager.getProperty = PropertyManager_getProperty
ERP5TypeInformation.manage_propertiesForm = PropertyManager_manage_propertiesForm

# NOTE(review): this rebinds the module-level name 'type_converters' to
# the ZPublisher registry, shadowing the OFS.PropertyManager mapping
# imported at the top of the module; the 'tales' converter is registered
# on the ZPublisher registry.  Confirm this is intended.
from ZPublisher.Converters import type_converters, field2string

# TALES properties are edited as plain strings.
type_converters['tales'] = field2string
diff --git a/product/ERP5Type/patches/States.py b/product/ERP5Type/patches/States.py
new file mode 100755
index 0000000000..5f85d36ae8
--- /dev/null
+++ b/product/ERP5Type/patches/States.py
@@ -0,0 +1,42 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# State types patch for DCWorkflow
+from Products.DCWorkflow.States import StateDefinition
+from Globals import DTMLFile
+from Products.ERP5Type import _dtmldir
+
+_properties_form = DTMLFile('state_properties', _dtmldir)
+
+def getAvailableTypeList(self):
+  """This is a method specific to ERP5. This returns a list of state types, which are used for portal methods.
+  """
+  # Hard-coded set of ERP5 state types (inventory and order categories).
+  return ('current_inventory', 'reserved_inventory', 'future_inventory',
+          'draft_order', 'planned_order', )
+  
+def setProperties(self, title='', transitions=(), REQUEST=None, description='', type_list=()):
+    '''
+    Update this state definition's properties.  ERP5 patch: also
+    stores type_list (the ERP5 state types this state belongs to).
+    '''
+    self.title = str(title)
+    self.description = str(description)
+    self.transitions = tuple(map(str, transitions))
+    # This is patched by yo.
+    self.type_list = tuple(type_list)
+    if REQUEST is not None:
+        return self.manage_properties(REQUEST, 'Properties changed.')
+
+# Install the ERP5 additions on DCWorkflow's StateDefinition.
+StateDefinition._properties_form = _properties_form
+StateDefinition.getAvailableTypeList = getAvailableTypeList
+StateDefinition.setProperties = setProperties
+# Class-level default for instances that predate this attribute.
+StateDefinition.type_list = ()
\ No newline at end of file
diff --git a/product/ERP5Type/patches/Transaction.py b/product/ERP5Type/patches/Transaction.py
new file mode 100755
index 0000000000..868e05619e
--- /dev/null
+++ b/product/ERP5Type/patches/Transaction.py
@@ -0,0 +1,179 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Adding commit_prepare to the zodb transaction
+try:
+    from ZODB import Transaction, POSException
+    
+    hosed = Transaction.hosed
+    free_transaction = Transaction.free_transaction
+    jar_cmp = Transaction.jar_cmp
+    
+    def commit(self, subtransaction=None):
+        """Finalize the transaction.
+
+        ERP5 patch: before the standard two-phase commit, call
+        _commit_prepare() repeatedly until the set of involved jars is
+        stable, so that objects registered from tpc_prepare() hooks
+        are committed inside this same transaction.
+        """
+        objects = self._objects
+
+        subjars = []
+        if subtransaction:
+            if self._sub is None:
+                # Must store state across multiple subtransactions
+                # so that the final commit can commit all subjars.
+                self._sub = {}
+        else:
+            if self._sub is not None:
+                # This commit is for a top-level transaction that
+                # has previously committed subtransactions.  Do
+                # one last subtransaction commit to clear out the
+                # current objects, then commit all the subjars.
+                if objects:
+                    self.commit(1)
+                    objects = []
+                subjars = self._sub.values()
+                subjars.sort(jar_cmp)
+                self._sub = None
+
+                # If there were any non-subtransaction-aware jars
+                # involved in earlier subtransaction commits, we need
+                # to add them to the list of jars to commit.
+                if self._non_st_objects is not None:
+                    objects.extend(self._non_st_objects)
+                    self._non_st_objects = None
+
+        if (objects or subjars) and hosed:
+            # Something really bad happened and we don't
+            # trust the system state.
+            # NOTE(review): 'hosed_msg' is never defined or imported in
+            # this module; this raise would fail with NameError -- confirm.
+            raise POSException.TransactionError, hosed_msg
+
+        # It's important that:
+        #
+        # - Every object in self._objects is either committed or
+        #   aborted.
+        #
+        # - For each object that is committed we call tpc_begin on
+        #   it's jar at least once
+        #
+        # - For every jar for which we've called tpc_begin on, we
+        #   either call tpc_abort or tpc_finish. It is OK to call
+        #   these multiple times, as the storage is required to ignore
+        #   these calls if tpc_begin has not been called.
+        #
+        # - That we call tpc_begin() in a globally consistent order,
+        #   so that concurrent transactions involving multiple storages
+        #   do not deadlock.
+        try:
+            ncommitted = 0
+            # Do prepare until number of jars is stable - this could
+            # create infinite loop
+            jars_len = -1
+            jars = self._get_jars(objects, subtransaction)
+            objects_len = len(self._objects)
+            while len(jars) != jars_len:
+                jars_len = len(jars)
+                self._commit_prepare(jars, subjars, subtransaction)
+                # Pick up any objects registered during the prepare pass.
+                if len(self._objects) != objects_len:
+                  objects.extend(self._objects[objects_len:])
+                  objects_len = len(self._objects)
+                jars = self._get_jars(objects, subtransaction)
+            try:
+                # If not subtransaction, then jars will be modified.
+                self._commit_begin(jars, subjars, subtransaction)
+                ncommitted += self._commit_objects(objects)
+                if not subtransaction:
+                    # Unless this is a really old jar that doesn't
+                    # implement tpc_vote(), it must raise an exception
+                    # if it can't commit the transaction.
+                    for jar in jars:
+                        try:
+                            vote = jar.tpc_vote
+                        except AttributeError:
+                            pass
+                        else:
+                            vote(self)
+
+                # Handle multiple jars separately.  If there are
+                # multiple jars and one fails during the finish, we
+                # mark this transaction manager as hosed.
+                if len(jars) == 1:
+                    self._finish_one(jars[0])
+                else:
+                    self._finish_many(jars)
+            except:
+                # Ugh, we got an error during commit, so we
+                # have to clean up.  First save the original exception
+                # in case the cleanup process causes another
+                # exception.
+                # NOTE(review): 'sys', 'LOG' and 'ERROR' are not imported
+                # in this module; confirm they resolve at runtime.
+                error = sys.exc_info()
+                try:
+                    self._commit_error(objects, ncommitted, jars, subjars)
+                except:
+                    LOG('ZODB', ERROR,
+                        "A storage error occured during transaction "
+                        "abort.  This shouldn't happen.",
+                        error=error)
+                raise error[0], error[1], error[2]
+        finally:
+            del objects[:] # clear registered
+            if not subtransaction and self._id is not None:
+                free_transaction()
+
+    def _commit_prepare(self, jars, subjars, subtransaction):
+        """ERP5 addition: give every jar a chance to register more
+        objects before the real two-phase commit, via an optional
+        tpc_prepare() method on the jar."""
+        if subtransaction:
+            assert not subjars
+            for jar in jars:
+                try:
+                    jar.tpc_prepare(self, subtransaction)
+                except TypeError:
+                    # Assume that TypeError means that tpc_prepare() only
+                    # takes one argument, and that the jar doesn't
+                    # support subtransactions.
+                    jar.tpc_prepare(self)
+                except AttributeError:
+                    # Assume that AttributeError means that tpc_prepare
+                    # is not available on this jar.
+                    pass
+        else:
+            # Merge in all the jars used by one of the subtransactions.
+
+            # When the top-level subtransaction commits, the tm must
+            # call commit_sub() for each jar involved in one of the
+            # subtransactions.  The commit_sub() method should call
+            # tpc_begin() on the storage object.
+
+            # It must also call tpc_begin() on jars that were used in
+            # a subtransaction but don't support subtransactions.
+
+            # These operations must be performed on the jars in order.
+
+            # Modify jars inplace to include the subjars, too.
+            jars += subjars
+            jars.sort(jar_cmp)
+            # assume that subjars is small, so that it's cheaper to test
+            # whether jar in subjars than to make a dict and do has_key.
+            for jar in jars:
+                #if jar in subjars:
+                #  pass
+                #else:
+                try:
+                    jar.tpc_prepare(self)
+                except AttributeError:
+                    # Assume that AttributeError means that tpc_prepare
+                    # is not available on this jar.
+                    pass
+
+    # Install the patched commit and the new prepare hook.
+    Transaction.Transaction.commit = commit
+    Transaction.Transaction._commit_prepare = _commit_prepare
+except ImportError:
+    # On Zope 2.8, do not patch Transaction. Instead, we use a before commit hook.
+    pass
diff --git a/product/ERP5Type/patches/WorkflowTool.py b/product/ERP5Type/patches/WorkflowTool.py
new file mode 100755
index 0000000000..6838ca6063
--- /dev/null
+++ b/product/ERP5Type/patches/WorkflowTool.py
@@ -0,0 +1,91 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Make sure Interaction Workflows are called even if method not wrapped
+
+from Products.CMFCore.WorkflowTool import WorkflowTool
+from Products.DCWorkflow.DCWorkflow import DCWorkflowDefinition
+
+def WorkflowTool_wrapWorkflowMethod(self, ob, method_id, func, args, kw):
+
+    """ To be invoked only by WorkflowCore.
+        Allows a workflow definition to wrap a WorkflowMethod.
+
+        By default, the workflow tool takes the first workflow which
+        supports the method_id. In ERP5, with Interaction Workflows, we
+        may have many workflows which can support a workflow method,
+        that's why we need this patch.
+
+        We should have 1 or 0 classic workflow (ie a DCWorkflow), and
+        0 or many Interaction workflows. We should take care that the
+        method will be called once
+    """
+    # Check workflow containing the workflow method
+    wf_list = []
+    wfs = self.getWorkflowsFor(ob)
+    if wfs:
+      for w in wfs:
+#         LOG('ERP5WorkflowTool.wrapWorkflowMethod, is wfMSupported', 0, 
+#              repr((w.isWorkflowMethodSupported(ob, method_id), 
+#                    w.getId(), ob, method_id )))
+        if (hasattr(w, 'isWorkflowMethodSupported')
+          and w.isWorkflowMethodSupported(ob, method_id)):
+          #wf = w
+          #break
+          wf_list.append(w)
+    else:
+      wfs = ()
+    # If no transition matched, simply call the method    
+    # And return
+    if len(wf_list)==0:
+      return apply(func, args, kw)
+    # Call notifyBefore on each workflow
+    for w in wfs:
+      w.notifyBefore(ob, method_id, args=args, kw=kw)
+    # Call the method on matching workflows
+    only_interaction_defined = 1
+    for w in wf_list:
+      if w.__class__.__name__ != 'InteractionWorkflowDefinition':
+        only_interaction_defined = 0
+        result = self._invokeWithNotification(
+            [], ob, method_id, w.wrapWorkflowMethod,
+            (ob, method_id, func, args, kw), {})
+    # If only interaction workflows are defined, we need to call the method
+    # manually
+    if only_interaction_defined:
+      result = apply(func, args, kw)
+    # Call notifySuccess on each workflow
+    for w in wfs:
+      w.notifySuccess(ob, method_id, result, args=args, kw=kw)
+    return result
+
+WorkflowTool.wrapWorkflowMethod = WorkflowTool_wrapWorkflowMethod
+
+def DCWorkflowDefinition_notifyBefore(self, ob, action, args=None, kw=None):
+    '''
+    Notifies this workflow of an action before it happens,
+    allowing veto by exception.  Unless an exception is thrown, either
+    a notifySuccess() or notifyException() can be expected later on.
+    The action usually corresponds to a method name.
+    '''
+    # No-op override installed by ERP5 -- presumably because the patched
+    # WorkflowTool drives notifications itself; confirm.
+    pass
+
+def DCWorkflowDefinition_notifySuccess(self, ob, action, result, args=None, kw=None):
+    '''
+    Notifies this workflow that an action has taken place.
+    '''
+    # No-op override installed by ERP5 (see note on notifyBefore).
+    pass
+
+DCWorkflowDefinition.notifyBefore = DCWorkflowDefinition_notifyBefore
+DCWorkflowDefinition.notifySuccess = DCWorkflowDefinition_notifySuccess
diff --git a/product/ERP5Type/patches/XMLExportImport.py b/product/ERP5Type/patches/XMLExportImport.py
new file mode 100755
index 0000000000..fb0b9e6109
--- /dev/null
+++ b/product/ERP5Type/patches/XMLExportImport.py
@@ -0,0 +1,170 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Make sure the xml export will be ordered
+
+from Shared.DC.xml import ppml
+from base64 import encodestring
+from cStringIO import StringIO
+try:
+  from ZODB.serialize import referencesf
+except ImportError:
+  from ZODB.referencesf import referencesf
+from ZODB.ExportImport import TemporaryFile
+from pickle import Pickler, EMPTY_DICT, MARK, DICT
+from cPickle import loads, dumps
+from types import *
+from OFS import XMLExportImport
+
+# Jython has PyStringMap; it's a dict subclass with string keys
+try:
+    from org.python.core import PyStringMap
+except ImportError:
+    PyStringMap = None
+
+# Ordered pickles
+class OrderedPickler(Pickler):
+    """Pickler that writes dict items sorted by key, so the produced
+    pickle (and hence the XML export) is deterministic."""
+
+    dispatch = Pickler.dispatch.copy()
+
+    def save_dict(self, obj):
+        write = self.write
+
+        if self.bin:
+            write(EMPTY_DICT)
+        else:   # proto 0 -- can't use EMPTY_DICT
+            write(MARK + DICT)
+
+        self.memoize(obj)
+        item_list = obj.items() # New version by JPS for sorting
+        item_list.sort(lambda a, b: cmp(a[0], b[0])) # New version by JPS for sorting
+        self._batch_setitems(item_list.__iter__())
+
+    # Route plain dicts (and Jython's PyStringMap) through save_dict.
+    dispatch[DictionaryType] = save_dict
+    if not PyStringMap is None:
+        dispatch[PyStringMap] = save_dict
+
+def reorderPickle(jar, p):
+    """Unpickle record data p and re-pickle it with OrderedPickler so
+    that dict items come out sorted; returns (object, new_pickle).
+    The record holds two pickles (class info, then state); both are
+    re-dumped in order and the state object is returned."""
+    from ZODB.ExportImport import Ghost, Unpickler, Pickler, StringIO, persistent_id
+
+    oids = {}
+    storage = jar._storage
+    new_oid = storage.new_oid
+    store = storage.store
+
+    def persistent_load(ooid,
+                        Ghost=Ghost,
+                        oids=oids, wrote_oid=oids.has_key,
+                        new_oid=storage.new_oid):
+
+        "Remap a persistent id to an existing ID and create a ghost for it."
+
+        if type(ooid) is TupleType: ooid, klass = ooid
+        else: klass=None
+
+        # Ghost's constructor signature differs between ZODB versions;
+        # try the no-arg form first, then fall back to Ghost(oid).
+        try:
+          Ghost=Ghost()
+          Ghost.oid=ooid
+        except TypeError:
+          Ghost=Ghost(ooid)
+        return Ghost
+
+
+    # Reorder pickle by doing I/O
+    pfile = StringIO(p)
+    unpickler=Unpickler(pfile)
+    unpickler.persistent_load=persistent_load
+
+    newp=StringIO()
+    pickler=OrderedPickler(newp,1)
+    pickler.persistent_id=persistent_id
+
+    # First pickle: class metadata; second pickle: object state.
+    pickler.dump(unpickler.load())
+    obj = unpickler.load()
+    pickler.dump(obj)
+    p=newp.getvalue()
+    return obj, p
+
+def XMLrecord(oid, plen, p, id_mapping):
+    """Render one pickle record as XML, remapping its oid (and the
+    matching base64 'aka' value) through id_mapping."""
+    # Proceed as usual
+    q=ppml.ToXMLUnpickler
+    f=StringIO(p)
+    u=q(f)
+    id=ppml.u64(oid)
+    id = id_mapping[id]
+    old_aka = encodestring(oid)[:-1]
+    aka=encodestring(ppml.p64(long(id)))[:-1]  # Rebuild oid based on mapped id
+    id_mapping.setConvertedAka(old_aka, aka)
+    u.idprefix=str(id)+'.'
+    p=u.load(id_mapping=id_mapping).__str__(4)
+    if f.tell() < plen:
+        # A ZODB record may contain a second pickle (the object state).
+        p=p+u.load(id_mapping=id_mapping).__str__(4)
+    String='  <record id="%s" aka="%s">\n%s  </record>\n' % (id, aka, p)
+    return String
+
+XMLExportImport.XMLrecord = XMLrecord
+
+def exportXML(jar, oid, file=None):
+    """Patched OFS exportXML: two passes over the object graph.  The
+    first pass (children sorted by their 'id' attribute) only populates
+    id_mapping; the second pass writes the XML records using the
+    stable, remapped ids."""
+
+    if file is None: file=TemporaryFile()
+    elif type(file) is StringType: file=open(file,'w+b')
+    id_mapping = ppml.MinimalMapping()
+    #id_mapping = ppml.IdentityMapping()
+    write=file.write
+    write('<?xml version="1.0"?>\012<ZopeData>\012')
+    version=jar._version
+    ref=referencesf
+    oids=[oid]
+    done_oids={}
+    done=done_oids.has_key
+    load=jar._storage.load
+    original_oid = oid
+    reordered_pickle = []
+    # Build mapping for refs
+    while oids:
+        oid=oids[0]
+        del oids[0]
+        if done(oid): continue
+        done_oids[oid]=1
+        try: p, serial = load(oid, version)
+        except: pass # Ick, a broken reference
+        else:
+            o, p = reorderPickle(jar, p)
+            reordered_pickle.append((oid, o, p))
+            # Rendering discarded here; only the mapping side effects
+            # (id / aka registration) matter on this first pass.
+            XMLrecord(oid,len(p),p, id_mapping)
+            # Determine new oids added to the list after reference calculation
+            old_oids = tuple(oids)
+            ref(p, oids)
+            new_oids = []
+            for i in oids:
+                if i not in old_oids: new_oids.append(i)
+            # Sort new oids based on id of object
+            # NOTE(review): this inner loop rebinds 'oid', shadowing the
+            # outer loop variable; harmless since 'oid' is reassigned at
+            # the top of the while loop, but easy to misread.
+            new_oidict = {}
+            for oid in new_oids:
+                try:
+                    p, serial = load(oid, version)
+                    o, p = reorderPickle(jar, p)
+                    new_oidict[oid] = getattr(o, 'id', None)
+                except:
+                    new_oidict[oid] = None # Ick, a broken reference
+            new_oids.sort(lambda a,b: cmp(new_oidict[a], new_oidict[b]))
+            # Build new sorted oids
+            oids = list(old_oids) + new_oids
+    # Do real export
+    for (oid, o, p) in reordered_pickle:
+        write(XMLrecord(oid,len(p),p, id_mapping))
+    write('</ZopeData>\n')
+    return file
+
+XMLExportImport.exportXML = exportXML
diff --git a/product/ERP5Type/patches/__init__.py b/product/ERP5Type/patches/__init__.py
new file mode 100755
index 0000000000..e69de29bb2
diff --git a/product/ERP5Type/patches/ppml.py b/product/ERP5Type/patches/ppml.py
new file mode 100755
index 0000000000..c65ab281b6
--- /dev/null
+++ b/product/ERP5Type/patches/ppml.py
@@ -0,0 +1,674 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# Import everything right now, not after
+# or new patch will not work
+from Shared.DC.xml.ppml import *
+
+class Global:
+    """Patched ppml.Global: a pickled global (class/function reference);
+    the id attribute is emitted only when that id was marked as actually
+    referenced elsewhere."""
+
+    def __init__(self, module, name, mapping):
+        self.module=module
+        self.name=name
+        self.mapping = mapping
+
+    def __str__(self, indent=0):
+        id = ''
+        if hasattr(self, 'id'):
+            # Only emit ids that some Reference actually uses.
+            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
+        name=string.lower(self.__class__.__name__)
+        return '%s<%s%s name="%s" module="%s"/>\n' % (
+            ' '*indent, name, id, self.name, self.module)
+
+from Shared.DC.xml import ppml
+ppml.Global = Global
+
+class Scalar:
+    """Patched ppml.Scalar: an immutable leaf value; its rendered XML
+    is cached in the mapping so later references can be inlined."""
+
+    def __init__(self, v, mapping):
+        self._v=v
+        self.mapping = mapping
+
+    def value(self): return self._v
+
+    def __str__(self, indent=0):
+        id = ''
+        name=string.lower(self.__class__.__name__)
+        result = '%s<%s%s>%s</%s>\n' % (
+            ' '*indent, name, id, self.value(), name)
+        if hasattr(self, 'id'):
+            # The value is Immutable - let us add it to the immutable mapping
+            # to reduce the number of unreadable references
+            self.mapping.setImmutable(self.id, Immutable(value = result))
+        return result
+
+ppml.Scalar = Scalar
+
+class Immutable:
+    """Holds the already-rendered XML of an immutable value so that a
+    Reference to it can be inlined instead of emitting an id link."""
+    def __init__(self, value):
+        self.value = value
+
+    def getValue(self):
+        return self.value
+
+class String(Scalar):
+    """Patched ppml.String: remembers the encoding chosen by convert()
+    and can remap base64-encoded values (used for oid references)."""
+    def __init__(self, v, mapping, encoding=''):
+        encoding, v = convert(v)
+        self.encoding=encoding
+        self._v=v
+        self.mapping = mapping
+    def __str__(self,indent=0,map_value=0):
+        v = self.value()
+        if map_value:
+            # This is used when strings represent references which need to be converted
+            if self.encoding == 'base64':
+                v = self.mapping.convertBase64(v)
+            else:
+                # Make sure we never produce this kind of xml output
+                # NOTE(review): bare 'raise' with no active exception --
+                # this would fail at runtime; confirm intended as a guard.
+                raise
+        id = ''
+        encoding=''
+        if hasattr(self, 'encoding'):
+            if self.encoding != 'repr':
+                # JPS repr is default encoding
+                encoding=' encoding="%s"' % self.encoding
+        name=string.lower(self.__class__.__name__)
+        result = '%s<%s%s%s>%s</%s>\n' % (
+            ' '*indent, name, id, encoding, v, name)
+        if hasattr(self, 'id'):
+            # The value is Immutable - let us add it to the immutable mapping
+            # to reduce the number of unreadable references
+            self.mapping.setImmutable(self.id, Immutable(value = result))
+        return result
+
+ppml.String = String
+
+class Unicode(String):
+    """Patched ppml.Unicode: renders the value encoded as UTF-8."""
+    def value(self):
+        return self._v.encode('utf-8')
+
+ppml.Unicode = Unicode
+
+class Wrapper:
+    """Patched ppml.Wrapper: an XML element wrapping a single
+    sub-value; emits its id attribute only when marked as referenced."""
+
+    def __init__(self, v, mapping):
+        self._v=v
+        self.mapping = mapping
+
+    def value(self): return self._v
+
+    def __str__(self, indent=0):
+        id = ''
+        if hasattr(self, 'id'):
+            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
+        name=string.lower(self.__class__.__name__)
+        v=self._v
+        i=' '*indent
+        if isinstance(v,Scalar):
+            # Scalars are rendered inline on one line.
+            return '%s<%s%s> %s </%s>\n' % (i, name, id, str(v)[:-1], name)
+        else:
+            v=v.__str__(indent+2)
+            return '%s<%s%s>\n%s%s</%s>\n' % (i, name, id, v, i, name)
+
+ppml.Wrapper = Wrapper
+
+class Collection:
+    """Patched ppml.Collection: base for container elements; renders a
+    self-closing tag when empty, and emits ids only when marked."""
+
+    def __init__(self, mapping):
+        self.mapping = mapping
+
+    def __str__(self, indent=0):
+        id = ''
+        if hasattr(self, 'id'):
+            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
+        name=string.lower(self.__class__.__name__)
+        i=' '*indent
+        if self:
+            return '%s<%s%s>\n%s%s</%s>\n' % (
+                i, name, id, self.value(indent+2), i, name)
+        else:
+            return '%s<%s%s/>\n' % (i, name, id)
+
+ppml.Collection = Collection
+
+class Dictionary(Collection):
+    """Patched ppml.Dictionary: keeps items as a list of (key, value)
+    pairs in insertion order and renders them as <item> elements."""
+    def __init__(self, mapping):
+        self.mapping = mapping
+        self._d=[]
+    def __len__(self): return len(self._d)
+    def __setitem__(self, k, v): self._d.append((k,v))
+    def value(self, indent):
+        #self._d.sort(lambda a, b: cmp(a[0]._v, b[0]._v)) # Sort the sequence by key JPS Improvement
+        return string.join(
+            map(lambda i, ind=' '*indent, indent=indent+4:
+                '%s<item>\n'
+                '%s'
+                '%s'
+                '%s</item>\n'
+                %
+                (ind,
+                 Key(i[0], self.mapping).__str__(indent),
+                 Value(i[1], self.mapping).__str__(indent),
+                 ind),
+                self._d
+                ),
+            '')
+
+ppml.Dictionary = Dictionary
+
+class Sequence(Collection):
+    """Patched ppml.Sequence: ordered container of already-converted
+    sub-elements, concatenated on rendering."""
+
+    def __init__(self, mapping, v=None):
+        if not v: v=[]
+        self._subs=v
+        self.mapping = mapping
+
+    def __len__(self): return len(self._subs)
+
+    def append(self, v): self._subs.append(v)
+
+    # Bugfix JPS
+    def extend(self, v): self._subs.extend(v)
+
+    def value(self, indent):
+        return string.join(map(
+            lambda v, indent=indent: v.__str__(indent),
+            self._subs),'')
+
+ppml.Sequence = Sequence
+
+class Persistent(Wrapper):
+    """Patched ppml.Persistent: a persistent reference; String payloads
+    are rendered with map_value=1 so base64-encoded oids get remapped
+    through the id mapping."""
+
+    def __str__(self, indent=0):
+        id = ''
+        if hasattr(self, 'id'):
+            if self.mapping.isMarked(self.id): id=' id="%s"' % self.mapping[self.id]
+        name=string.lower(self.__class__.__name__)
+        v=self._v
+        i=' '*indent
+        if isinstance(v,String):
+            return '%s<%s%s> %s </%s>\n' % (i, name, id, v.__str__(map_value=1)[:-1], name)
+        elif isinstance(v,Scalar):
+            return '%s<%s%s> %s </%s>\n' % (i, name, id, str(v)[:-1], name)
+        else:
+            v=v.__str__(indent+2)
+            return '%s<%s%s>\n%s%s</%s>\n' % (i, name, id, v, i, name)
+
+ppml.Persistent = Persistent
+
+class Reference(Scalar):
+    """Patched ppml.Reference: a back-reference to an earlier value.
+    If the target was cached as Immutable, its XML is inlined;
+    otherwise the id is marked as used and an id link is emitted."""
+    def __init__(self, v, mapping):
+        self._v=v
+        self.mapping = mapping
+    def __str__(self, indent=0):
+        v=self._v
+        name=string.lower(self.__class__.__name__)
+        #LOG('Reference', 0, str(v))
+        if self.mapping.hasImmutable(v):
+          return self.mapping.getImmutable(v).getValue()
+        #LOG('noImmutable', 0, "%s mapped to %s" % (v, self.mapping[v]))
+        self.mapping.mark(v)
+        return '%s<%s id="%s"/>\n' % (' '*indent,name,self.mapping[v])
+
+ppml.Reference = Reference
+# 'Get' is the pickle GET opcode's element; identical to Reference.
+Get = Reference
+ppml.Get = Get
+
+class Object(Sequence):
+    """Patched ppml.Object: a reduced object -- class plus constructor
+    args, with an optional State appended via __setstate__."""
+    def __init__(self, klass, args, mapping):
+        self._subs=[Klass(klass, mapping), args]
+        self.mapping = mapping
+
+    def __setstate__(self, v): self.append(State(v, self.mapping))
+
+ppml.Object = Object
+
+class IdentityMapping:
+    """Pass-through id mapping: ids are returned unchanged, every id is
+    considered marked, and base64 values are not converted.  Also hosts
+    the Immutable value cache shared with MinimalMapping."""
+
+    def __init__(self):
+      self.immutable = {}
+
+    def resetMapping(self):
+      pass
+
+    def __getitem__(self, id):
+      return id
+
+    def setConvertedAka(self, old, new):
+      pass
+
+    def convertBase64(self, s):
+      return s
+
+    def mark(self, v):
+      pass
+
+    def isMarked(self, v):
+      return 1
+
+    def setImmutable(self, k, v):
+      self.immutable[k] = v
+
+    def getImmutable(self, k):
+      return self.immutable[k]
+
+    def hasImmutable(self, k):
+      return self.immutable.has_key(k)
+
+
+ppml.IdentityMapping = IdentityMapping
+
+class MinimalMapping(IdentityMapping):
+    """Remaps 'core.sub'-style pickle ids to small sequential integers
+    so that exported XML ids are compact and stable across exports."""
+    def __init__(self):
+      self.mapped_id = {}
+      self.mapped_core_id = {}
+      self.last_sub_id = {}
+      self.last_id = 1
+      self.converted_aka = {}
+      self.marked_reference = {}
+      self.immutable = {}
+
+    def resetMapping(self):
+      # Note: the immutable cache is deliberately kept across resets.
+      self.mapped_id = {}
+      self.mapped_core_id = {}
+      self.last_sub_id = {}
+      self.last_id = 1
+      self.converted_aka = {}
+      self.marked_reference = {}
+
+    def __getitem__(self, id):
+      # Ids are either 'core' or 'core.sub'; anything else is invalid.
+      id = str(id)
+      split_id = id.split('.')
+      if len(split_id) == 2:
+        (core_id, sub_id) = split_id
+      elif len(split_id) == 1:
+        core_id = split_id[0]
+        sub_id = None
+      else:
+        # NOTE(review): bare 'raise' with no active exception -- would
+        # fail at runtime; confirm intended as an assertion-style guard.
+        raise
+      if not self.mapped_id.has_key(core_id):
+        if sub_id is not None:
+          # Use existing id
+          self.mapped_id[core_id] = {}
+          self.mapped_core_id[core_id] = self.last_id - 1
+          self.last_sub_id[core_id] = 1
+        else:
+          # Create new core_id if not defined
+          self.mapped_id[core_id] = {}
+          self.mapped_core_id[core_id] = self.last_id
+          self.last_sub_id[core_id] = 1
+          self.last_id = self.last_id + 1
+      if sub_id is None:
+        return self.mapped_core_id[core_id]
+      if not self.mapped_id[core_id].has_key(sub_id):
+        # Create new sub_id if not defined
+        self.mapped_id[core_id][sub_id] = self.last_sub_id[core_id]
+        self.last_sub_id[core_id] = self.last_sub_id[core_id] + 1
+      return "%s.%s" % (self.mapped_core_id[core_id], self.mapped_id[core_id][sub_id])
+
+    def convertBase64(self, s):
+      return self.converted_aka.get(s, s)
+
+    def setConvertedAka(self, old, new):
+      self.converted_aka[old] =  new
+
+    def mark(self, v):
+      self.marked_reference[v] = 1
+
+    def isMarked(self, v):
+      return self.marked_reference.has_key(v)
+
+    def __str__(self, a):
+      # Debug guard: this mapping is not meant to be rendered directly.
+      return "Error here"
+
+ppml.MinimalMapping = MinimalMapping
+
+# Concrete element types reusing the patched base classes above.
+class List(Sequence): pass
+class Tuple(Sequence): pass
+
+class Klass(Wrapper): pass
+class State(Wrapper): pass
+class Pickle(Wrapper): pass
+
+class Int(Scalar): pass
+class Float(Scalar): pass
+
+class Key(Wrapper): pass
+class Value(Wrapper): pass
+
+class Long(Scalar):
+    def value(self):
+        # Strip the trailing 'L' suffix of Python 2 long reprs.
+        result = str(self._v)
+        if result[-1:] == 'L':
+            return result[:-1]
+        return result
+
+class ToXMLUnpickler(Unpickler):
+    """Unpickler variant that builds a tree of XML-ready wrapper nodes
+    (Pickle, Tuple, Dictionary, Get, ...) instead of real Python objects.
+    Each load_* handler below mirrors one pickle opcode and tags its
+    result with self.id_mapping so memo ids can be rewritten later."""
+
+    def load(self, id_mapping=None):
+      # id_mapping translates pickle memo ids; defaults to the identity.
+      if id_mapping is None:
+        self.id_mapping = IdentityMapping()
+      else:
+        self.id_mapping = id_mapping
+      return Pickle(Unpickler.load(self), self.id_mapping)
+
+    # Start from the stock Unpickler dispatch table, then override the
+    # opcodes below with wrapper-producing versions.
+    dispatch = {}
+    dispatch.update(Unpickler.dispatch)
+
+    def persistent_load(self, v):
+        return Persistent(v, self.id_mapping)
+
+    def load_persid(self):
+        # PERSID: the persistent id is the rest of the current line.
+        pid = self.readline()[:-1]
+        self.append(self.persistent_load(String(pid, self.id_mapping)))
+    dispatch[PERSID] = load_persid
+
+    def load_none(self):
+        self.append(none)
+    dispatch[NONE] = load_none
+
+    def load_int(self):
+        self.append(Int(string.atoi(self.readline()[:-1]), self.id_mapping))
+    dispatch[INT] = load_int
+
+    def load_binint(self):
+        # BININT: 4-byte little-endian integer, decoded via marshal.
+        self.append(Int(mloads('i' + self.read(4)), self.id_mapping))
+    dispatch[BININT] = load_binint
+
+    def load_binint1(self):
+        self.append(Int(mloads('i' + self.read(1) + '\000\000\000'), self.id_mapping))
+    dispatch[BININT1] = load_binint1
+
+    def load_binint2(self):
+        self.append(Int(mloads('i' + self.read(2) + '\000\000'), self.id_mapping))
+    dispatch[BININT2] = load_binint2
+
+    def load_long(self):
+        self.append(Long(string.atol(self.readline()[:-1], 0), self.id_mapping))
+    dispatch[LONG] = load_long
+
+    def load_float(self):
+        self.append(Float(string.atof(self.readline()[:-1]), self.id_mapping))
+    dispatch[FLOAT] = load_float
+
+    def load_binfloat(self, unpack=struct.unpack):
+        # BINFLOAT: 8-byte big-endian IEEE double.
+        self.append(Float(unpack('>d', self.read(8))[0], self.id_mapping))
+    dispatch[BINFLOAT] = load_binfloat
+
+    def load_string(self):
+        # STRING stores a repr; eval with empty builtins so nothing beyond
+        # a literal can execute.
+        self.append(String(eval(self.readline()[:-1],
+                                {'__builtins__': {}}), self.id_mapping)) # Let's be careful
+    dispatch[STRING] = load_string
+
+    def load_binstring(self):
+        len = mloads('i' + self.read(4))
+        self.append(String(self.read(len), self.id_mapping))
+    dispatch[BINSTRING] = load_binstring
+
+    def load_unicode(self):
+        self.append(Unicode(unicode(eval(self.readline()[:-1],
+                                         {'__builtins__': {}})), self.id_mapping)) # Let's be careful
+    dispatch[UNICODE] = load_unicode
+
+    def load_binunicode(self):
+        len = mloads('i' + self.read(4))
+        self.append(Unicode(unicode(self.read(len), 'utf-8'), self.id_mapping))
+    dispatch[BINUNICODE] = load_binunicode
+
+    def load_short_binstring(self):
+        len = mloads('i' + self.read(1) + '\000\000\000')
+        self.append(String(self.read(len), self.id_mapping))
+    dispatch[SHORT_BINSTRING] = load_short_binstring
+
+    def load_tuple(self):
+        # Collapse everything above the MARK into a single Tuple node.
+        k = self.marker()
+        #LOG('load_tuple, k',0,k)
+        #LOG('load_tuple, stack[k+1:]',0,self.stack[k+1:])
+        self.stack[k:] = [Tuple(self.id_mapping, v=self.stack[k+1:])]
+    dispatch[TUPLE] = load_tuple
+
+    def load_empty_tuple(self):
+        self.stack.append(Tuple(self.id_mapping))
+    dispatch[EMPTY_TUPLE] = load_empty_tuple
+
+    def load_empty_list(self):
+        self.stack.append(List(self.id_mapping))
+    dispatch[EMPTY_LIST] = load_empty_list
+
+    def load_empty_dictionary(self):
+        self.stack.append(Dictionary(self.id_mapping))
+    dispatch[EMPTY_DICT] = load_empty_dictionary
+
+    def load_list(self):
+        k = self.marker()
+        self.stack[k:] = [List(self.id_mapping, v=self.stack[k+1:])]
+    dispatch[LIST] = load_list
+
+    def load_dict(self):
+        # Items above the MARK alternate key, value.
+        k = self.marker()
+        d = Dictionary(self.id_mapping)
+        items = self.stack[k+1:]
+        for i in range(0, len(items), 2):
+            key = items[i]
+            value = items[i+1]
+            d[key] = value
+        self.stack[k:] = [d]
+    dispatch[DICT] = load_dict
+
+    def load_inst(self):
+        # INST: class named by two following lines, args above the MARK.
+        k = self.marker()
+        args = Tuple(self.id_mapping, v=self.stack[k+1:])
+        del self.stack[k:]
+        module = self.readline()[:-1]
+        name = self.readline()[:-1]
+        value=Object(Global(module, name, self.id_mapping), args, self.id_mapping)
+        self.append(value)
+    dispatch[INST] = load_inst
+
+    def load_obj(self):
+        # OBJ: the class object sits just above the MARK, then the args.
+        stack = self.stack
+        k = self.marker()
+        klass = stack[k + 1]
+        del stack[k + 1]
+        args = Tuple(self.id_mapping, v=stack[k + 1:])
+        del stack[k:]
+        value=Object(klass,args, self.id_mapping)
+        self.append(value)
+    dispatch[OBJ] = load_obj
+
+    def load_global(self):
+        module = self.readline()[:-1]
+        name = self.readline()[:-1]
+        self.append(Global(module, name, self.id_mapping))
+    dispatch[GLOBAL] = load_global
+
+    def load_reduce(self):
+        stack = self.stack
+
+        callable = stack[-2]
+        arg_tup  = stack[-1]
+        del stack[-2:]
+
+        value=Object(callable, arg_tup, self.id_mapping)
+        self.append(value)
+    dispatch[REDUCE] = load_reduce
+
+    idprefix=''
+
+    # GET/PUT handlers keep memo ids symbolic (Get nodes / .id attributes)
+    # instead of resolving them, so the generated XML preserves references.
+    def load_get(self):
+        self.append(Get(self.idprefix+self.readline()[:-1], self.id_mapping))
+    dispatch[GET] = load_get
+
+    def load_binget(self):
+        i = mloads('i' + self.read(1) + '\000\000\000')
+        self.append(Get(self.idprefix+`i`, self.id_mapping))
+    dispatch[BINGET] = load_binget
+
+    def load_long_binget(self):
+        i = mloads('i' + self.read(4))
+        self.append(Get(self.idprefix+`i`, self.id_mapping))
+    dispatch[LONG_BINGET] = load_long_binget
+
+    def load_put(self):
+        self.stack[-1].id=self.idprefix+self.readline()[:-1]
+    dispatch[PUT] = load_put
+
+    def load_binput(self):
+        i = mloads('i' + self.read(1) + '\000\000\000')
+        #LOG('load_binput', 0, 'self.stack = %r, self.idprefix+`i` = %r' % (self.stack, self.idprefix+`i`))
+        self.stack[-1].id=self.idprefix+`i`
+    dispatch[BINPUT] = load_binput
+
+    def load_long_binput(self):
+        i = mloads('i' + self.read(4))
+        self.stack[-1].id=self.idprefix+`i`
+    dispatch[LONG_BINPUT] = load_long_binput
+
+    class LogCall:
+      # Debugging helper: wraps a dispatch entry to log each invocation
+      # (enabled by the commented-out loop below).
+      def __init__(self, func):
+        self.func = func
+
+      def __call__(self, context):
+        #LOG('LogCall', 0, 'self.stack = %r, func = %s' % (context.stack, self.func.__name__))
+        return self.func(context)
+
+    #for code in dispatch.keys():
+    #  dispatch[code] = LogCall(dispatch[code])
+
+ppml.ToXMLUnpickler = ToXMLUnpickler
+
+def end_string(self, tag, data):
+    v=data[2]
+    a=data[1]
+    encoding = a.get('encoding','repr') # JPS: repr is default encoding
+    if encoding != '': # Bugfix since (is was used on string)
+        v=unconvert(encoding,v)
+    if a.has_key('id'): self._pickleids[a['id']]=v
+    return v
+
+ppml.end_string = end_string
+
+def end_unicode(self, tag, data):
+    return unicode(end_string(self, tag, data), 'utf-8')
+
+ppml.end_unicode = end_unicode
+
+class xmlUnpickler(NoBlanks, xyap):
+    # Expat handler tables mapping XML element names back to Python
+    # values.  NOTE(review): 'name', 'start_pickle', 'end_list',
+    # 'end_tuple' and 'end_dictionary' come from the surrounding patch
+    # module / ppml scope, not from this class.
+    start_handlers={'pickle': start_pickle}
+    end_handlers={
+        'int':
+        lambda self,tag,data,atoi=string.atoi,name=name:
+            atoi(name(self, tag, data)),
+        'long':
+        lambda self,tag,data,atoi=string.atoi,name=name:
+            atoi(name(self, tag, data)),
+        'boolean':
+        lambda self,tag,data,atoi=string.atoi,name=name:
+            atoi(name(self, tag, data)),
+        'string': end_string ,
+        'unicode': end_unicode ,
+        'double':
+        lambda self,tag,data,atof=string.atof,name=name:
+            atof(name(self, tag, data)),
+        'float':
+        lambda self,tag,data,atof=string.atof,name=name:
+            atof(name(self, tag, data)),
+        'none': lambda self, tag, data: None,
+        'list': end_list,
+        'tuple': end_tuple,
+        'dictionary': end_dictionary,
+        'key': lambda self, tag, data: data[2],
+        'value': lambda self, tag, data: data[2],
+        'item': lambda self, tag, data: data[2:],
+        # <reference> resolves an id recorded earlier by end_string.
+        'reference': lambda self, tag, data: self._pickleids[data[1]['id']],
+        'state': lambda self, tag, data: data[2],
+        'klass': lambda self, tag, data: data[2],
+        }
+
+ppml.xmlUnpickler = xmlUnpickler
+
+def save_string(self, tag, data):
+    binary=self.binary
+    v=''
+    a=data[1]
+    if len(data)>2:
+        for x in data[2:]:
+            v=v+x
+    encoding=a.get('encoding','repr') # JPS: repr is default encoding
+    if encoding is not '':
+        v=unconvert(encoding,v)
+    put='p'
+    if binary:
+        l=len(v)
+        s=mdumps(l)[1:]
+        if (l<256):
+            v='U'+s[0]+v
+        else:
+            v='T'+s+v
+        put='q'
+    else: v="S'"+v+"'\012"
+    return save_put(self, v, a)
+
+ppml.save_string = save_string
+
+def save_unicode(self, tag, data):
+    binary=self.binary
+    v=''
+    a=data[1]
+    if len(data)>2:
+        for x in data[2:]:
+            v=v+x
+    encoding=a.get('encoding','repr') # JPS: repr is default encoding
+    if encoding is not '':
+        v=unconvert(encoding,v)
+    if binary:
+        l=len(v)
+        s=mdumps(l)[1:]
+        v=BINUNICODE+s+v
+    else: v=UNICODE+"'"+v+"'\012"
+    return save_put(self, v, a)
+
+ppml.save_unicode = save_unicode
+
+class xmlPickler(NoBlanks, xyap):
+    # Expat handler tables that turn XML elements back into pickle
+    # opcode text; the closing </pickle> appends the STOP opcode ('.').
+    # NOTE(review): save_int, save_float, save_reference, save_tuple,
+    # save_list, save_dict, save_object, save_global and save_persis come
+    # from the surrounding patch module / ppml scope.
+    start_handlers={
+        'pickle': lambda self, tag, attrs: [tag, attrs],
+        }
+    end_handlers={
+        'pickle': lambda self, tag, data: data[2]+'.',
+        'none': lambda self, tag, data: 'N',
+        'int': save_int,
+        'long': lambda self, tag, data: 'L'+data[2]+'L\012',
+        'float': save_float,
+        'string': save_string,
+        'unicode': save_unicode,
+        'reference': save_reference,
+        'tuple': save_tuple,
+        'list': save_list,
+        'dictionary': save_dict,
+        'item': lambda self, tag, data, j=string.join: j(data[2:],''),
+        'value': lambda self, tag, data: data[2],
+        'key' : lambda self, tag, data: data[2],
+        'object': save_object,
+        'klass': lambda self, tag, data: data[2],
+        'state': lambda self, tag, data: data[2],
+        'global': save_global,
+        'persistent': save_persis,
+        }
+
+ppml.xmlPickler = xmlPickler
+
+# NOTE(review): this rebinds the module-level Tuple defined earlier with
+# an identical definition before exporting it into ppml -- presumably a
+# leftover of the ZopePatch split; harmless but redundant.
+class Tuple(Sequence): pass
+
+ppml.Tuple = Tuple
diff --git a/product/ERP5Type/patches/sqlvar.py b/product/ERP5Type/patches/sqlvar.py
new file mode 100755
index 0000000000..78df7fba46
--- /dev/null
+++ b/product/ERP5Type/patches/sqlvar.py
@@ -0,0 +1,114 @@
+##############################################################################
+#
+# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
+# Copyright (c) 2002,2005 Nexedi SARL and Contributors. All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+# dtml-sqlvar patch to convert None to NULL, and deal with DateTime
+
+from Shared.DC.ZRDB.sqlvar import SQLVar
+from Shared.DC.ZRDB import sqlvar
+from string import atoi,atof
+from types import StringType
+
+def SQLVar_render(self, md):
+    """Render a dtml-sqlvar value as SQL text.
+
+    ERP5 patch over the stock Zope implementation: a None value becomes
+    the SQL literal 'null' when the variable is optional, and a new
+    'datetime' type is supported (formatted 'YYYY-MM-DD HH:MM:SS' via
+    ISO()/strftime, then sql-quoted).  md is the DTML namespace.
+    """
+    name=self.__name__
+    args=self.args
+    t=args['type']
+    try:
+        expr=self.expr
+        if type(expr) is type(''): v=md[expr]
+        else: v=expr(md)
+    except:
+        # NOTE(review): if self.expr itself is missing, 'expr' is unbound
+        # here and the type() test below raises NameError instead of the
+        # intended ValueError -- confirm SQLVar always sets .expr before
+        # changing.
+        if args.has_key('optional') and args['optional']:
+            return 'null'
+        if type(expr) is not type(''):
+            raise
+        raise ValueError, 'Missing input variable, <em>%s</em>' % name
+
+    if t=='int':
+        # Validate as an integer; a trailing 'L' (Python 2 long repr) is
+        # stripped first.
+        try:
+            if type(v) is StringType:
+                if v[-1:]=='L':
+                    v=v[:-1]
+                atoi(v)
+            else: v=str(int(v))
+        except:
+            if not v and args.has_key('optional') and args['optional']:
+                return 'null'
+            raise ValueError, (
+                'Invalid integer value for <em>%s</em>' % name)
+    elif t=='float':
+        try:
+            if type(v) is StringType:
+                if v[-1:]=='L':
+                    v=v[:-1]
+                atof(v)
+            else: v=str(float(v))
+        except:
+            if not v and args.has_key('optional') and args['optional']:
+                return 'null'
+            raise ValueError, (
+                'Invalid floating-point value for <em>%s</em>' % name)
+    # Patched by yo
+    elif t=='datetime':
+        if v is None:
+            if args.has_key('optional') and args['optional']:
+                return 'null'
+            else:
+                raise ValueError, (
+                    'Invalid datetime value for <em>%s</em>: %r' % (name, v))
+
+        try:
+            # Prefer DateTime.ISO(), then datetime.strftime, else str().
+            if hasattr(v, 'ISO'):
+                v=v.ISO()
+            if hasattr(v, 'strftime'):
+                v=v.strftime('%Y-%m-%d %H:%M:%S')
+            else: v=str(v)
+        except:
+            if not v and args.has_key('optional') and args['optional']:
+                return 'null'
+            raise ValueError, (
+                'Invalid datetime value for <em>%s</em>: %r' % (name, v))
+
+        # Quote through the connection-provided sql_quote__.
+        v=md.getitem('sql_quote__',0)(v)
+    # End of patch
+    else:
+        # Patched by yo
+        if v is None:
+            if args.has_key('optional') and args['optional']:
+                return 'null'
+            else:
+                raise ValueError, (
+                    'Invalid string value for <em>%s</em>' % name)
+        # End of patch
+
+        if not isinstance(v, (str, unicode)):
+            v=str(v)
+        # 'nb' means non-blank: an empty string is only allowed to
+        # degrade to null when the variable is optional.
+        if not v and t=='nb':
+            if args.has_key('optional') and args['optional']:
+                return 'null'
+            else:
+                raise ValueError, (
+                    'Invalid empty string value for <em>%s</em>' % name)
+
+        v=md.getitem('sql_quote__',0)(v)
+        #if find(v,"\'") >= 0: v=join(split(v,"\'"),"''")
+        #v="'%s'" % v
+
+    return v
+
+# Patched by yo. datetime is added.
+# Predicate for dtml-sqlvar type validation; bound method has_key makes
+# it a callable accepting the type name (Python 2 idiom).
+valid_type={'int':1, 'float':1, 'string':1, 'nb': 1, 'datetime' : 1}.has_key
+
+# Monkey-patch the stock Zope sqlvar module with the ERP5 versions.
+SQLVar.render = SQLVar_render
+SQLVar.__call__ = SQLVar_render
+sqlvar.valid_type = valid_type
-- 
2.30.9