Commit eb371277 authored by Martin Manchev, committed by Ivan Tyagov

Revert "Changes in 'erp5_wendelin_data_lake' ..."

This reverts commit 004be34a439b81c2e658ed0d30ab2e1946924822.
parent 40ee4d19
...@@ -115,9 +115,9 @@ ...@@ -115,9 +115,9 @@
<key> <string>categories</string> </key> <key> <string>categories</string> </key>
<value> <value>
<tuple> <tuple>
<string>specialise/portal_templates/54</string>
<string>resource/portal_workflow/erp5_wendelin_workflow</string> <string>resource/portal_workflow/erp5_wendelin_workflow</string>
<string>current_state/portal_workflow/erp5_wendelin_workflow/state_start</string> <string>current_state/portal_workflow/erp5_wendelin_workflow/state_start</string>
<string>specialise/portal_templates/54</string>
</tuple> </tuple>
</value> </value>
</item> </item>
...@@ -136,9 +136,11 @@ ...@@ -136,9 +136,11 @@
<value> <value>
<object> <object>
<klass> <klass>
<global id="1.1" name="DateTime" module="DateTime.DateTime"/> <global id="1.1" name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple> <tuple>
<global id="1.2" name="DateTime" module="DateTime.DateTime"/>
<global id="1.3" name="object" module="__builtin__"/>
<none/> <none/>
</tuple> </tuple>
<state> <state>
...@@ -170,6 +172,8 @@ ...@@ -170,6 +172,8 @@
<object> <object>
<klass> <reference id="1.1"/> </klass> <klass> <reference id="1.1"/> </klass>
<tuple> <tuple>
<reference id="1.2"/>
<reference id="1.3"/>
<none/> <none/>
</tuple> </tuple>
<state> <state>
......
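The pickle hunks above are the heart of the revert: the XML pickles stop routing instance reconstruction through copy_reg._reconstructor and go back to naming the class (DateTime, NameAssignments) directly. A minimal Python 2 sketch of what the _reconstructor form does when unpickled; the tuple (DateTime, object, None) mirrors the <tuple> element above:

    # Python 2 sketch of the <global name="_reconstructor" module="copy_reg"/> form.
    import copy_reg
    from DateTime import DateTime

    # _reconstructor(cls, base, state): with base=object and state=None it just
    # does object.__new__(cls), i.e. allocates an empty instance whose attributes
    # are then filled in from the <state> element of the pickle.
    obj = copy_reg._reconstructor(DateTime, object, None)
    print type(obj)  # DateTime.DateTime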
""" """
This script will be called to apply the customization. This script will be called to apply the customization.
""" """
from erp5.component.module.Log import log from erp5.component.module.Log import log
# Activate the knowledge pads on portal home to enable later the Wendelin # Activate the knowledge pads on portal home to enable later the Wendelin
# Information gadget. # Information gadget.
portal = context.getPortalObject() portal = context.getPortalObject()
default_site_preference = getattr(portal.portal_preferences, default_site_preference = getattr(portal.portal_preferences,
...@@ -39,7 +39,7 @@ if default_security_model_business_template is not None: ...@@ -39,7 +39,7 @@ if default_security_model_business_template is not None:
portal_type_instance = getattr(portal.portal_types, portal_type) portal_type_instance = getattr(portal.portal_types, portal_type)
print "Updated Role Mappings for %s" %portal_type print "Updated Role Mappings for %s" %portal_type
portal_type_instance.updateRoleMapping() portal_type_instance.updateRoleMapping()
# update local roles (if any) # update local roles (if any)
business_template = context.getSpecialiseValue() business_template = context.getSpecialiseValue()
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -11,9 +11,13 @@ ...@@ -11,9 +11,13 @@
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -11,9 +11,13 @@ ...@@ -11,9 +11,13 @@
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -11,9 +11,13 @@ ...@@ -11,9 +11,13 @@
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
...@@ -6,18 +6,18 @@ ...@@ -6,18 +6,18 @@
</pickle> </pickle>
<pickle> <pickle>
<dictionary> <dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item> <item>
<key> <string>_bind_names</string> </key> <key> <string>_bind_names</string> </key>
<value> <value>
<object> <object>
<klass> <klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/> <global name="_reconstructor" module="copy_reg"/>
</klass> </klass>
<tuple/> <tuple>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
<global name="object" module="__builtin__"/>
<none/>
</tuple>
<state> <state>
<dictionary> <dictionary>
<item> <item>
......
from erp5.component.module.Log import log from erp5.component.module.Log import log
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery from Products.ZSQLCatalog.SQLCatalog import Query
import hashlib import hashlib
CHUNK_SIZE = 200000 CHUNK_SIZE = 200000
...@@ -14,7 +14,7 @@ def getHash(data_stream): ...@@ -14,7 +14,7 @@ def getHash(data_stream):
end_offset = n_chunk*chunk_size+chunk_size end_offset = n_chunk*chunk_size+chunk_size
try: try:
data_stream_chunk = ''.join(data_stream.readChunkList(start_offset, end_offset)) data_stream_chunk = ''.join(data_stream.readChunkList(start_offset, end_offset))
except: except Exception:
# data stream is empty # data stream is empty
data_stream_chunk = "" data_stream_chunk = ""
hash_md5.update(data_stream_chunk) hash_md5.update(data_stream_chunk)
...@@ -24,7 +24,6 @@ def getHash(data_stream): ...@@ -24,7 +24,6 @@ def getHash(data_stream):
def isInterruptedAbandonedSplitIngestion(reference): def isInterruptedAbandonedSplitIngestion(reference):
from DateTime import DateTime from DateTime import DateTime
now = DateTime()
day_hours = 1.0/24/60*60*24 day_hours = 1.0/24/60*60*24
# started split data ingestions for reference # started split data ingestions for reference
catalog_kw = {'portal_type': 'Data Ingestion', catalog_kw = {'portal_type': 'Data Ingestion',
...@@ -90,8 +89,8 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion", ...@@ -90,8 +89,8 @@ for data_ingestion in portal_catalog(portal_type = "Data Ingestion",
portal.data_stream_module.deleteContent(data_stream.getId()) portal.data_stream_module.deleteContent(data_stream.getId())
if last_data_stream_id.endswith(reference_end_split): if last_data_stream_id.endswith(reference_end_split):
portal.ERP5Site_invalidateSplitIngestions(data_ingestion.getReference(), success=True) portal.ERP5Site_invalidateSplitIngestions(data_ingestion.getReference(), success=True)
hash = getHash(full_data_stream) full_data_stream_hash = getHash(full_data_stream)
full_data_stream.setVersion(hash) full_data_stream.setVersion(full_data_stream_hash)
if full_data_stream.getValidationState() != "validated": if full_data_stream.getValidationState() != "validated":
full_data_stream.validate() full_data_stream.validate()
related_split_ingestions = portal_catalog(portal_type = "Data Ingestion", related_split_ingestions = portal_catalog(portal_type = "Data Ingestion",
......
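getHash above versions the joined Data Stream by MD5-hashing it in fixed-size chunks. A standalone sketch of the same constant-memory pattern over an ordinary file; the helper name and path handling are hypothetical, only the chunking idea is taken from the hunk:

    import hashlib

    CHUNK_SIZE = 200000

    def get_file_hash(path, chunk_size=CHUNK_SIZE):
      # Read fixed-size chunks so arbitrarily large inputs hash in constant
      # memory, mirroring getHash() walking data_stream.readChunkList() windows.
      hash_md5 = hashlib.md5()
      with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
          hash_md5.update(chunk)
      return hash_md5.hexdigest()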
...@@ -44,7 +44,7 @@ if data_ingestion != None: ...@@ -44,7 +44,7 @@ if data_ingestion != None:
# previous ingestion was interrupted # previous ingestion was interrupted
log(''.join(["[WARNING] User has restarted an interrupted ingestion for reference ", data_ingestion.getReference(), ". Previous split ingestions will be discarded and full ingestion restarted."])) log(''.join(["[WARNING] User has restarted an interrupted ingestion for reference ", data_ingestion.getReference(), ". Previous split ingestions will be discarded and full ingestion restarted."]))
portal.ERP5Site_invalidateSplitIngestions(data_ingestion.getReference(), success=False) portal.ERP5Site_invalidateSplitIngestions(data_ingestion.getReference(), success=False)
except: except Exception:
pass pass
# the ingestion attempt corresponds to a split ingestion in progress, accept # the ingestion attempt corresponds to a split ingestion in progress, accept
return FALSE return FALSE
......
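This hunk, like several others in the commit, narrows a bare except: to except Exception:. Since Python 2.5, SystemExit and KeyboardInterrupt derive from BaseException, so the narrowed clause no longer swallows interpreter shutdown or Ctrl-C. A minimal sketch; the callable is a hypothetical stand-in for the guarded catalog/stream calls:

    def first_result_or_none(lookup):
      # lookup is any zero-argument callable, e.g. a catalog query.
      try:
        return lookup()
      except Exception:  # ordinary errors only; a bare "except:" would also
        return None      # trap SystemExit and KeyboardInterrupt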
from DateTime import DateTime from DateTime import DateTime
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog portal_catalog = portal.portal_catalog
from erp5.component.module.Log import log
now = DateTime() now = DateTime()
query_dict = { query_dict = {
"portal_type": "Data Ingestion Line", "portal_type": "Data Ingestion Line",
......
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery, ComplexQuery from Products.ZSQLCatalog.SQLCatalog import Query, ComplexQuery
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog portal_catalog = portal.portal_catalog
......
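The hunk above only drops the unused SimpleQuery import; Query and ComplexQuery stay in use. For context, a hedged usage sketch of the two; the column values are illustrative, not taken from this script:

    from Products.ZSQLCatalog.SQLCatalog import Query, ComplexQuery

    # Combine two catalog criteria with an explicit AND.
    query = ComplexQuery(Query(portal_type='Data Ingestion'),
                         Query(simulation_state='started'),
                         logical_operator='and')
    result_list = context.getPortalObject().portal_catalog(query=query)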
import json import json
portal = context.getPortalObject() portal = context.getPortalObject()
dict = {'invalid_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['invalid_suffix'], dictionary = {'invalid_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['invalid_suffix'],
'split_end_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['split_end_suffix'], 'split_end_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['split_end_suffix'],
'single_end_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['single_end_suffix'], 'single_end_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['single_end_suffix'],
'split_first_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['split_first_suffix'], 'split_first_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['split_first_suffix'],
...@@ -10,4 +10,4 @@ dict = {'invalid_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['inva ...@@ -10,4 +10,4 @@ dict = {'invalid_suffix':portal.ERP5Site_getIngestionReferenceDictionary()['inva
'reference_length':portal.ERP5Site_getIngestionReferenceDictionary()['reference_length'], 'reference_length':portal.ERP5Site_getIngestionReferenceDictionary()['reference_length'],
'invalid_chars':portal.ERP5Site_getIngestionReferenceDictionary()['invalid_chars'], 'invalid_chars':portal.ERP5Site_getIngestionReferenceDictionary()['invalid_chars'],
} }
return json.dumps(dict) return json.dumps(dictionary)
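The dict -> dictionary rename here (like hash -> full_data_stream_hash and object -> state_change_object elsewhere in this commit) stops shadowing a Python builtin, which is unusable for the rest of the scope once rebound. A tiny demonstration:

    dict = {'invalid_suffix': '_invalid'}  # shadows the builtin name
    try:
      dict(split_end_suffix='EOS')         # TypeError: 'dict' object is not callable
    except TypeError:
      pass
    del dict                               # the builtin is reachable again
    print dict(split_end_suffix='EOS')     # {'split_end_suffix': 'EOS'}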
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
data_set = portal.data_set_module.get(reference) data_set = portal.data_set_module.get(reference)
if data_set is not None: if data_set is not None:
......
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery, ComplexQuery from Products.ZSQLCatalog.SQLCatalog import Query, ComplexQuery
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog portal_catalog = portal.portal_catalog
...@@ -16,5 +16,5 @@ for document in portal_catalog(**kw_dict): ...@@ -16,5 +16,5 @@ for document in portal_catalog(**kw_dict):
portal.ERP5Site_invalidateReference(document) portal.ERP5Site_invalidateReference(document)
try: try:
document.invalidate() document.invalidate()
except: except Exception:
pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI) pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI)
...@@ -20,11 +20,11 @@ for data_set in portal_catalog(portal_type="Data Set", **catalog_kw): ...@@ -20,11 +20,11 @@ for data_set in portal_catalog(portal_type="Data Set", **catalog_kw):
portal.ERP5Site_invalidateIngestionObjects(data_stream.getReference()) portal.ERP5Site_invalidateIngestionObjects(data_stream.getReference())
try: try:
data_stream.invalidate() data_stream.invalidate()
except: except Exception:
pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI) pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI)
portal.ERP5Site_invalidateReference(data_set) portal.ERP5Site_invalidateReference(data_set)
try: try:
data_set.invalidate() data_set.invalidate()
except: except Exception:
pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI) pass # fails if it's already invalidated, draft or if it doesn't allow invalidation (e.g. DI)
return printed return printed
...@@ -79,5 +79,5 @@ if not batch_mode: ...@@ -79,5 +79,5 @@ if not batch_mode:
return portal.Base_redirect(form_id='login_form', return portal.Base_redirect(form_id='login_form',
keep_items = dict(portal_status_message=context.Base_translateString(message_str))) keep_items = dict(portal_status_message=context.Base_translateString(message_str)))
else: else:
return json.dumps({'msg': message_str, return json.dumps({'msg': message_str,
'code':0}) 'code':0})
...@@ -2,7 +2,6 @@ import base64 ...@@ -2,7 +2,6 @@ import base64
import json import json
portal = context.getPortalObject() portal = context.getPortalObject()
portal_catalog = portal.portal_catalog
request = context.REQUEST request = context.REQUEST
data_chunk = request.get('data_chunk') data_chunk = request.get('data_chunk')
...@@ -11,10 +10,10 @@ data_set = portal.data_set_module.get(dataset) ...@@ -11,10 +10,10 @@ data_set = portal.data_set_module.get(dataset)
if data_set is not None: if data_set is not None:
decoded = base64.b64decode(data_chunk) decoded = base64.b64decode(data_chunk)
data_set.setDescription(decoded) data_set.setDescription(decoded)
dict = { 'status_code': 0, 'message': 'Dataset description successfully set.' } response = { 'status_code': 0, 'message': 'Dataset description successfully set.' }
else: else:
message = "No remote dataset found for reference '%s'" % (dataset) message = "No remote dataset found for reference '%s'" % (dataset)
dict = { 'status_code': 1, 'message': message } response = { 'status_code': 1, 'message': message }
context.logEntry(message) context.logEntry(message)
return json.dumps(dict) return json.dumps(response)
...@@ -103,7 +103,7 @@ class TestDataIngestion(SecurityTestCase): ...@@ -103,7 +103,7 @@ class TestDataIngestion(SecurityTestCase):
def stepIngest(self, extension, delimiter, randomize_ingestion_reference=False, data_set_reference=False): def stepIngest(self, extension, delimiter, randomize_ingestion_reference=False, data_set_reference=False):
file_name = "file_name.csv" file_name = "file_name.csv"
reference = self.getRandomReference() reference = self.getRandomReference()
array = [[random.random() for i in range(self.CHUNK_SIZE_CSV + 10)] for j in range(self.CHUNK_SIZE_CSV + 10)] array = [[random.random() for _ in range(self.CHUNK_SIZE_CSV + 10)] for _ in range(self.CHUNK_SIZE_CSV + 10)]
np.savetxt(file_name, array, delimiter=delimiter) np.savetxt(file_name, array, delimiter=delimiter)
chunk = [] chunk = []
with open(file_name, 'r') as csv_file: with open(file_name, 'r') as csv_file:
......
object = state_change['object'] state_change_object = state_change["object"]
object.Base_checkConsistency() state_change_object.Base_checkConsistency()
erp5_wendelin erp5_wendelin
erp5_credential erp5_credential
\ No newline at end of file
import warnings import warnings
import numpy as np import numpy as np
from keras import backend as K from keras import backend as K # pylint:disable=import-error
from keras import __version__ as keras_version from keras import __version__ as keras_version # pylint:disable=import-error
from keras.models import Sequential from keras.models import Sequential # pylint:disable=import-error
from keras.models import model_from_config from keras.models import model_from_config # pylint:disable=import-error
from keras.optimizers import optimizer_from_config from keras.optimizers import optimizer_from_config # pylint:disable=import-error
from keras import optimizers from keras import optimizers # pylint:disable=import-error
def save_model(model, model_store=None): def save_model(model, model_store=None):
data = {} data = {}
...@@ -179,7 +179,6 @@ def load_model(data): ...@@ -179,7 +179,6 @@ def load_model(data):
else: else:
model._make_train_function() model._make_train_function()
optimizer_weights_dict = data['optimizer_weights'] optimizer_weights_dict = data['optimizer_weights']
optimizer_weight_names = optimizer_weights_dict['weight_names']
optimizer_weight_values = optimizer_weights_dict['weight_values'] optimizer_weight_values = optimizer_weights_dict['weight_values']
model.optimizer.set_weights(optimizer_weight_values) model.optimizer.set_weights(optimizer_weight_values)
return model return model
\ No newline at end of file
import numpy as np import numpy as np
import time import time
import sys
import transaction import transaction
class Progbar(object): class Progbar(object):
def output(self, data): def output(self, data):
self.output1(str(data)) self.output1(str(data))
def __init__(self, target, width=30, verbose=1, interval=0.01, output=None): def __init__(self, target, width=30, verbose=1, interval=0.01, output=None):
"""Dislays a progress bar. """Dislays a progress bar.
# Arguments: # Arguments:
target: Total number of steps expected. target: Total number of steps expected.
interval: Minimum visual progress update interval (in seconds). interval: Minimum visual progress update interval (in seconds).
""" """
self.width = width self.width = width
self.target = target self.target = target
self.sum_values = {} self.sum_values = {}
self.unique_values = [] self.unique_values = []
self.start = time.time() self.start = time.time()
self.last_update = 0 self.last_update = 0
self.interval = interval self.interval = interval
self.total_width = 0 self.total_width = 0
self.seen_so_far = 0 self.seen_so_far = 0
self.verbose = verbose self.verbose = verbose
self.output1 = output self.output1 = output
def update(self, current, values=[], force=False): def update(self, current, values=None, force=False):
"""Updates the progress bar. """Updates the progress bar.
# Arguments # Arguments
current: Index of current step. current: Index of current step.
values: List of tuples (name, value_for_last_step). values: List of tuples (name, value_for_last_step).
The progress bar will display averages for these values. The progress bar will display averages for these values.
force: Whether to force visual progress update. force: Whether to force visual progress update.
""" """
    if values is None:
      values = []

    for k, v in values:
      if k not in self.sum_values:
        self.sum_values[k] = [v * (current - self.seen_so_far),
                              current - self.seen_so_far]
        self.unique_values.append(k)
      else:
        self.sum_values[k][0] += v * (current - self.seen_so_far)
        self.sum_values[k][1] += (current - self.seen_so_far)
    self.seen_so_far = current

    now = time.time()
    if self.verbose == 1:
      if not force and (now - self.last_update) < self.interval:
        return

      prev_total_width = self.total_width
      #self.output('\b' * prev_total_width)
      self.output('\r')

      numdigits = int(np.floor(np.log10(self.target))) + 1
      barstr = '%%%dd/%%%dd [' % (numdigits, numdigits)
      bar = barstr % (current, self.target)
      prog = float(current) / self.target
      prog_width = int(self.width * prog)
      if prog_width > 0:
        bar += ('=' * (prog_width - 1))
        if current < self.target:
          bar += '>'
        else:
          bar += '='
      bar += ('.' * (self.width - prog_width))
      bar += ']'
      self.output(bar)
      self.total_width = len(bar)

      if current:
        time_per_unit = (now - self.start) / current
      else:
        time_per_unit = 0
      eta = time_per_unit * (self.target - current)
      info = ''
      if current < self.target:
        info += ' - ETA: %ds' % eta
      else:
        info += ' - %ds' % (now - self.start)
      for k in self.unique_values:
        info += ' - %s:' % k
        if isinstance(self.sum_values[k], list):
          avg = self.sum_values[k][0] / max(1, self.sum_values[k][1])
          if abs(avg) > 1e-3:
            info += ' %.4f' % avg
          else:
            info += ' %.4e' % avg
        else:
          info += ' %s' % self.sum_values[k]

      self.total_width += len(info)
      if prev_total_width > self.total_width:
        info += ((prev_total_width - self.total_width) * ' ')

      self.output(info)

      if current >= self.target:
        self.output('\r\n')

    if self.verbose == 2:
      if current >= self.target:
        info = '%ds' % (now - self.start)
        for k in self.unique_values:
          info += ' - %s:' % k
          avg = self.sum_values[k][0] / max(1, self.sum_values[k][1])
          if avg > 1e-3:
            info += ' %.4f' % avg
          else:
            info += ' %.4e' % avg
        self.output(info + "\r\n")

    self.last_update = now

  def add(self, n, values=None):
    if values is None:
      values = []
    self.update(self.seen_so_far + n, values)

from keras.callbacks import ProgbarLogger as OriginalProgbarLogger # pylint:disable=import-error
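The values=[] -> values=None change in update() and add() fixes the classic mutable-default pitfall: a default list is evaluated once at function definition and then shared by every call. A minimal Python 2 demonstration (function names are illustrative):

    def add_bad(item, bucket=[]):     # one list, created at def time, shared forever
      bucket.append(item)
      return bucket

    def add_good(item, bucket=None):  # fresh list per call, as in the fixed Progbar
      if bucket is None:
        bucket = []
      bucket.append(item)
      return bucket

    print add_bad(1), add_bad(2)      # [1, 2] [1, 2] -- state leaked between calls
    print add_good(1), add_good(2)    # [1] [2]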
class ProgbarLogger(OriginalProgbarLogger): class ProgbarLogger(OriginalProgbarLogger):
...@@ -162,10 +168,10 @@ def train(portal): ...@@ -162,10 +168,10 @@ def train(portal):
# 1. you can use keras. # 1. you can use keras.
# 2. you can save trained model. # 2. you can save trained model.
# 3. you can load trained model. # 3. you can load trained model.
from cStringIO import StringIO # from cStringIO import StringIO
import tensorflow as tf import tensorflow as tf # pylint:disable=import-error
sess = tf.Session() sess = tf.Session()
from keras import backend as K from keras import backend as K # pylint:disable=import-error
K.set_session(sess) K.set_session(sess)
stream = portal.data_stream_module.wendelin_examples_keras_log stream = portal.data_stream_module.wendelin_examples_keras_log
...@@ -176,8 +182,8 @@ def train(portal): ...@@ -176,8 +182,8 @@ def train(portal):
if saved_model_data is not None: if saved_model_data is not None:
model = portal.keras_load_model(saved_model_data) model = portal.keras_load_model(saved_model_data)
else: else:
from keras.models import Sequential from keras.models import Sequential # pylint:disable=import-error
from keras.layers import Dense from keras.layers import Dense # pylint:disable=import-error
model = Sequential() model = Sequential()
model.add(Dense(12, input_dim=8, init='uniform', activation='relu')) model.add(Dense(12, input_dim=8, init='uniform', activation='relu'))
model.add(Dense(8, init='uniform', activation='relu')) model.add(Dense(8, init='uniform', activation='relu'))
......
from keras.applications.vgg16 import VGG16, preprocess_input, decode_predictions from keras.applications.vgg16 import VGG16, preprocess_input, decode_predictions # pylint:disable=import-error
from keras.preprocessing import image from keras.preprocessing import image # pylint:disable=import-error
import numpy as np import numpy as np
from cStringIO import StringIO from cStringIO import StringIO
import PIL.Image import PIL.Image
......