Commit a0408a0a authored by Julien Muchembled

wendelin-scalability: new test-neo SR

parent 42313409
[buildout]
extends =
  ../wendelin/software.cfg
  ../../component/mariadb/mariarocks.cfg

[local-bt5-repository]
list += ${slapos.cookbook-repository:location}/software/wendelin-scalability

[patch-template]
recipe = slapos.recipe.build
location = ${buildout:directory}/${:_buildout_section_name_}.cfg.in
script =
  with open(self.options['location'], 'w') as dst, \
       open(self.options['base']) as src:
    src = src.read()
    i = src.index('[buildout]')
    dst.write(src[:i] + self.options['extra'] + '\n' + src[i:])
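The [patch-template] macro copies its 'base' profile into ${buildout:directory}/<section>.cfg.in, splicing the section's 'extra' text right before the first [buildout] header of the base file. A minimal standalone sketch of that transformation, with placeholder file names and extra text (not values from this SR):

# Sketch of the patch-template transformation; 'base.cfg', 'patched.cfg.in'
# and the extra text are placeholders, not values from this SR.
def patch_template(base_path, dst_path, extra):
  with open(base_path) as src:
    text = src.read()
  # Splice the extra section(s) just before the base profile's [buildout] header.
  i = text.index('[buildout]')
  with open(dst_path, 'w') as dst:
    dst.write(text[:i] + extra + '\n' + text[i:])

if __name__ == '__main__':
  patch_template('base.cfg', 'patched.cfg.in', '[extra-section]\nkey = value')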
@@ -33,7 +33,7 @@ tags =
[feeder]
feeder =
  #
  # Same algorithm as ERP5Site_simulateFluentdIngestion
  import os, struct
  from random import lognormvariate
  pack = struct.Struct('!d').pack
......
[buildout]
extends =
  ../../component/gnupg/buildout.cfg
  ../fluentd/software.cfg
  ../wendelin/software.cfg
  ../../component/mariadb/mariarocks.cfg

[local-bt5-repository]
list += ${slapos.cookbook-repository:location}/software/wendelin-scalability

[patch-template]
recipe = slapos.recipe.build
location = ${buildout:directory}/${:_buildout_section_name_}.cfg.in
script =
  with open(self.options['location'], 'w') as dst, \
       open(self.options['base']) as src:
    src = src.read()
    i = src.index('[buildout]')
    dst.write(src[:i] + self.options['extra'] + '\n' + src[i:])
test-common.cfg
[template-erp5]
recipe =
......
# NEO: data deduplication must be enabled
[buildout]
extends = test-fluentd-common.cfg
extends =
  ../../component/gnupg/buildout.cfg
  test-fluentd-common.cfg
[template-erp5-patched]
extra =
......
# The 'start_ingest' command causes a zope to fill NEO as fast as possible,
# as if fluentd pushed data.
#
# Use sigma > 0 to have oids of variable size inside NEO.
# To know the average compression ratio:
# x=test_scalability_fluentd/ExtensionTemplateItem/portal_components/extension.erp5.ScalabilityFluentd.py
# $x 10 1
# 0.434851958247
# 2155 - 65536 (99th percentile)
# $x 8.787 0
# 0.100036621094
# 6556
[buildout]
extends = test-common.cfg
parts += start_ingest

[start_ingest]
recipe = slapos.recipe.template:jinja2
rendered = ${buildout:bin-directory}/${:_buildout_section_name_}
mode = 0755
template =
  inline:#!${buildout:executable}
  import argparse, base64, httplib, sys
  parser = argparse.ArgumentParser()
  _ = parser.add_argument
  _('--site-id', default='erp5')
  _('hostport', metavar='host[:port]', help='Zope address')
  _('password', help="'zope' user password")
  _('reference', help='Data Stream reference')
  _('mu', type=float)
  _('sigma', type=float)
  _('chunks_per_transaction', nargs='?', type=int, help='default: 128 (8 MiB)')
  args = parser.parse_args()
  qs = []
  for k in 'reference', 'mu', 'sigma', 'chunks_per_transaction':
    v = getattr(args, k)
    if v is not None:
      t = type(v)
      qs.append('%s=%s' % (k if t is str else k + ':' + t.__name__, v))
  c = httplib.HTTPConnection(args.hostport)
  c.putrequest('GET', '/%s/ERP5Site_simulateFluentdIngestion?%s'
    % (args.site_id, '&'.join(qs)))
  c.putheader('Authorization',
    'Basic ' + base64.b64encode('zope:' + args.password))
  c.endheaders()
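The query string relies on Zope's form marshalling: non-string values carry a ':<type>' suffix so they reach ERP5Site_simulateFluentdIngestion as float/int. A minimal sketch of what a hypothetical "start_ingest host:port password test-stream 10 1" invocation would request (all values are placeholders; only the parameter names and the suffix convention come from the template above):

# Sketch of the query string built by the start_ingest template above;
# 'test-stream', 10 and 1 are placeholder arguments.
params = [('reference', 'test-stream'), ('mu', 10.0), ('sigma', 1.0)]
qs = '&'.join(k + '=' + str(v) if isinstance(v, str)
              else '%s:%s=%s' % (k, type(v).__name__, v)
              for k, v in params)
print('/erp5/ERP5Site_simulateFluentdIngestion?' + qs)
# -> /erp5/ERP5Site_simulateFluentdIngestion?reference=test-stream&mu:float=10.0&sigma:float=1.0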
#!/usr/bin/python
from __future__ import division, print_function
import os, struct
from random import lognormvariate

bigfile_chunk_size = 65536

def simulateFluentdIngestion(self, reference, mu, sigma,
                             chunks_per_transaction=128):
  from time import time
  import transaction
  note = (self['portal_ingestion_policies']['scalability_test_unpack'].getPath()
          + '/ingest')
  module = self['data_stream_module']
  try:
    data_stream = module[reference]
  except KeyError:
    data_stream = module.newContent(reference, 'Data Stream')
    transaction.commit()
  pack = struct.Struct('!d').pack
  data = os.urandom(bigfile_chunk_size - 8)
  # Endless ingestion: each chunk is an 8-byte timestamp followed by a
  # lognormally-sized slice of random data, zero-padded to 64 KiB.
  while 1:
    txn = transaction.begin()
    data_stream.appendData(''.join(
      (pack(time()) + data[:int(lognormvariate(mu, sigma))]
       ).ljust(bigfile_chunk_size, '\0')
      for _ in xrange(chunks_per_transaction)))
    txn.note(note)
    txn.commit()

if __name__ == '__main__':
  # Standalone mode: estimate the average compression ratio and the chunk
  # size distribution for the given mu/sigma (see the SR comment above).
  import sys
  mu, sigma = map(float, sys.argv[1:3])
  if sigma:
    try:
      n = int(sys.argv[3])
    except IndexError:
      n = 1000000
  else:
    n = 1
  x = sorted(min(int(lognormvariate(mu, sigma)), bigfile_chunk_size - 8)
             for _ in xrange(n))
  print((8 * n + sum(x)) / (bigfile_chunk_size * n))
  if n == 1:
    print(x[0] + 8)
  else:
    n //= 100
    if n:
      print(8 + x[n], '-', 8 + x[-n-1], '(99th percentile)')
    else:
      print(8 + x[0], '-', 8 + x[-1])
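As a quick check of the "$x 8.787 0" figures quoted in the buildout comment above: with sigma = 0, lognormvariate(mu, 0) degenerates to exp(mu), so the useful part of every 64 KiB chunk is a fixed 8 + int(exp(mu)) bytes. A worked example, not part of the component:

# Worked check of the sigma == 0 case quoted in the SR comment (illustration only).
from math import exp
bigfile_chunk_size = 65536
mu = 8.787
useful = 8 + int(exp(mu))                  # 8-byte timestamp + int(exp(mu)) random bytes
print(useful)                              # 6556
print(useful / float(bigfile_chunk_size))  # ~0.100036621094 (average compression ratio)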
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Extension Component" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_recorded_property_dict</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
<item>
<key> <string>default_reference</string> </key>
<value> <string>ScalabilityFluentd</string> </value>
</item>
<item>
<key> <string>description</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>extension.erp5.ScalabilityFluentd</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Extension Component</string> </value>
</item>
<item>
<key> <string>sid</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>text_content_error_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple>
<string>W: 8, 46: Redefining name \'mu\' from outer scope (line 35) (redefined-outer-name)</string>
<string>W: 8, 50: Redefining name \'sigma\' from outer scope (line 35) (redefined-outer-name)</string>
</tuple>
</value>
</item>
<item>
<key> <string>version</string> </key>
<value> <string>erp5</string> </value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary/>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>component_validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAQ=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="4" aka="AAAAAAAAAAQ=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ExternalMethod" module="Products.ExternalMethod.ExternalMethod"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_function</string> </key>
<value> <string>simulateFluentdIngestion</string> </value>
</item>
<item>
<key> <string>_module</string> </key>
<value> <string>ScalabilityFluentd</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>ERP5Site_simulateFluentdIngestion</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
extension.erp5.ScalabilityFluentd
\ No newline at end of file
portal_ingestion_policies/scalability_test_*
portal_skins/custom/DataStreamModule_getTotalSize
portal_skins/custom/ERP5Site_handleRawDataFluentdIngestion
\ No newline at end of file
portal_skins/custom/ERP5Site_handleRawDataFluentdIngestion
portal_skins/custom/ERP5Site_simulateFluentdIngestion
\ No newline at end of file