Commit 7d0be444 authored by Jason Madden

clean up logging for production. [skip ci]

parent 31bf3b10
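The hunks below swap most log() calls for dbg() and gate the per-line tracing in produce_preprocessor behind a DEBUG check, so production builds only emit the high-level progress messages. The helper definitions themselves sit outside these hunks; the following is a minimal sketch of the kind of split the dbg() calls assume (the environment-variable name and exact bodies are illustrative, not taken from this diff):

    import os
    import sys

    # Assumed module-level switch; the real flag and how it is set are not shown in this diff.
    DEBUG = os.environ.get('CYTHONPP_DEBUG', '') not in ('', '0')

    def log(message, *args):
        # Always-on progress output, kept for production runs.
        string = message % args if args else message
        sys.stderr.write(string if string.endswith('\n') else string + '\n')

    def dbg(message, *args):
        # Verbose tracing, emitted only when DEBUG is enabled.
        if DEBUG:
            log(message, *args)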
@@ -94,9 +94,9 @@ def _run_cython_on_file(configuration, pyx_filename,
     unique_output_filename = os.path.join(tempdir, output_filename)
     dirname = os.path.dirname(unique_pyx_filename) # output must be in same dir
-    log("Output filename %s", unique_output_filename)
+    dbg("Output filename %s", unique_output_filename)
     if dirname and not os.path.exists(dirname):
-        print("Making dir", dirname)
+        dbg("Making dir %s", dirname)
         os.makedirs(dirname)
     try:
         atomic_write(unique_pyx_filename, py_banner + value)
@@ -108,7 +108,7 @@ def _run_cython_on_file(configuration, pyx_filename,
         atomic_write(unique_output_filename + '.deb', output)
     finally:
         shutil.rmtree(tempdir, True)
-        #pass
     return attach_tags(output, configuration), configuration, sourcehash
@@ -124,7 +124,6 @@ def _run_cython_on_files(pyx_filename, py_banner, banner, output_filename, prepr
                                     counter, lines,
                                     cache)))
         threads[-1].start()
-        #threads[-1].join()
     for t in threads:
         t.join()
@@ -191,7 +190,7 @@ def process_filename(filename, output_filename=None):
     result = generate_merged(sources)
     result_hash = md5(result.encode("utf-8")).hexdigest()
     atomic_write(output_filename, result)
-    log('%s bytes (hash %s)\n', len(result), result_hash)
+    log('%s bytes\n', len(result))
     if filename != pyx_filename:
         log('Saving %s', pyx_filename)
@@ -306,29 +305,21 @@ def merge(sources):
     Str('everyone\n', [set([('defined(world)', False)])])]
     """
     sources = list(sources) # own copy
-    log("Merging %s", len(sources))
+    dbg("Merging %s", len(sources))
     if len(sources) <= 1:
         return [Str(str(x), simplify_tags(x.tags)) for x in sources[0]]
-    #return merge([_merge(sources[0], sources[1])] + sources[2:])
     pool = multiprocessing.Pool()
-    # class SerialPool(object):
-    #     def imap(self, func, iterable):
-    #         for args in iterable:
-    #             yield func(*args)
-    #pool = SerialPool()
     groups = []
     while len(sources) >= 2:
         one, two = sources.pop(), sources.pop()
         groups.append((one, two))
-    log("Merge groups %s", len(groups))
+    dbg("Merge groups %s", len(groups))
     # len sources == 0 or 1
     for merged in pool.imap(_merge, groups):
-        log("Completed a merge in %s", os.getpid())
+        dbg("Completed a merge in %s", os.getpid())
         sources.append(merged)
         # len sources == 1 or 2
@@ -338,17 +329,15 @@ def merge(sources):
         # len sources == 1
     # len sources should now be 1
-    print("Now merging", len(sources))
+    dbg("Now merging %s", len(sources))
     return merge(sources)
 
 def _merge(*args):
-    #log("imerging %s", len(args))
     if isinstance(args[0], tuple):
         a, b = args[0]
     else:
         a, b = args
-    #log("Merging %s and %s (%s %s) in %s", id(a), id(b), len(a), len(b), os.getpid())
     return list(_imerge(a, b))
 
 def _flatten(tags):
@@ -359,7 +348,6 @@ def _flatten(tags):
 def _imerge(a, b):
     # caching the tags speeds up serialization and future merges
-    flat_tag_cache = {}
     tag_cache = {}
     for tag, i1, i2, j1, j2 in difflib.SequenceMatcher(None, a, b).get_opcodes():
         if tag == 'equal':
@@ -408,19 +396,23 @@ def expand_to_match(items):
 def produce_preprocessor(iterable):
-    current_line = [0]
-    def wrap(line, log=True):
-        current_line[0] += 1
-        dbg('%5d: %s', current_line[0], repr(str(line))[1:-1])
-        return line
+    if DEBUG:
+        current_line = [0]
+        def wrap(line):
+            current_line[0] += 1
+            dbg('%5d: %s', current_line[0], repr(str(line))[1:-1])
+            return line
+    else:
+        def wrap(line):
+            return line
     state = None
     for line in iterable:
         key = line.tags or None
         if key == state:
-            yield wrap(line, key)
+            yield wrap(line)
         else:
             if exact_reverse(key, state):
                 yield wrap('#else /* %s */\n' % format_tags(state))
@@ -429,7 +421,7 @@ def produce_preprocessor(iterable):
                     yield wrap('#endif /* %s */\n' % format_tags(state))
                 if key:
                     yield wrap('#if %s\n' % format_tags(key))
-            yield wrap(line, key)
+            yield wrap(line)
             state = key
     if state:
         yield wrap('#endif /* %s */\n' % format_tags(state))
@@ -687,7 +679,7 @@ def atomic_write(filename, data):
 def run_cython(filename, sourcehash, output_filename, banner, comment, cache=None):
-    log("Cython output to %s hash %s", output_filename, sourcehash)
+    dbg("Cython output to %s hash %s", output_filename, sourcehash)
     result = cache.get(sourcehash) if cache is not None else None
     command = '%s -o %s -I gevent %s' % (CYTHON, pipes.quote(output_filename), pipes.quote(filename))
     if result is not None:
@@ -704,7 +696,7 @@ def system(command, comment):
     log('Running %s # %s', command, comment)
     try:
         subprocess.check_call(command, shell=True)
-        log('\tDone running %s # %s', command, comment)
+        dbg('\tDone running %s # %s', command, comment)
     except subprocess.CalledProcessError:
         # debugging code
         log("Path: %s", os.getenv("PATH"))