Commit 53e4c102 authored by Dag Sverre Seljebotn

Refactored pipeline construction to Pipeline.py

parent 1612aab0
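In short: the pipeline-construction and pipeline-running helpers that used to be methods on Context (create_pipeline, create_pyx_pipeline, create_pxd_pipeline, create_py_pipeline, create_pyx_as_pxd_pipeline, run_pipeline) become module-level functions in Cython/Compiler/Pipeline.py that take the context as an explicit argument. A rough caller-side sketch of the change, with the surrounding setup (context, options, result, source) assumed:

    # before: pipeline handling hangs off the Context instance
    pipeline = context.create_pyx_pipeline(options, result)
    err, enddata = context.run_pipeline(pipeline, source)

    # after: module-level functions, context passed explicitly
    from Cython.Compiler import Pipeline
    pipeline = Pipeline.create_pyx_pipeline(context, options, result)
    err, enddata = Pipeline.run_pipeline(pipeline, source)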
@@ -16,6 +16,7 @@ from Cython.Compiler.Main import Context, CompilationOptions, default_options
from Cython.Compiler.ParseTreeTransforms import CythonTransform, SkipDeclarations, AnalyseDeclarationsTransform
from Cython.Compiler.TreeFragment import parse_from_strings
from Cython.Build.Dependencies import strip_string_literals, cythonize
from Cython.Compiler import Pipeline
# A utility function to convert user-supplied ASCII strings to unicode.
if sys.version_info[0] < 3:
@@ -43,7 +44,7 @@ def unbound_symbols(code, context=None):
context = Context([], default_options)
from Cython.Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
tree = parse_from_strings('(tree fragment)', code)
for phase in context.create_pipeline(pxd=False):
for phase in Pipeline.create_pipeline(context, 'pyx'):
if phase is None:
continue
tree = phase(tree)
......
@@ -14,7 +14,6 @@ except NameError:
from sets import Set as set
import itertools
from time import time
import Code
import Errors
@@ -37,17 +36,6 @@ module_name_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_
verbose = 0
def dumptree(t):
# For quick debugging in pipelines
print t.dump()
return t
def abort_on_errors(node):
# Stop the pipeline if there are any errors.
if Errors.num_errors != 0:
raise AbortError, "pipeline break"
return node
class CompilationData(object):
# Bundles the information that is passed from transform to transform.
# (For now, this is only)
@@ -102,205 +90,23 @@ class Context(object):
self.future_directives.add(unicode_literals)
self.modules['builtins'] = self.modules['__builtin__']
def create_pipeline(self, pxd, py=False):
from Visitor import PrintTree
from ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform
from ParseTreeTransforms import AnalyseExpressionsTransform
from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
from ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from TypeInference import MarkAssignments, MarkOverflowingArithmetic
from ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
from ParseTreeTransforms import RemoveUnreachableCode, GilCheck
from FlowControl import CreateControlFlowGraph
from AnalysedTreeTransforms import AutoTestDictTransform
from AutoDocTransforms import EmbedSignature
from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
from Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
from Optimize import ConstantFolding, FinalOptimizePhase
from Optimize import DropRefcountingTransform
from Buffer import IntroduceBufferAuxiliaryVars
from ModuleNode import check_c_declarations, check_c_declarations_pxd
if pxd:
_check_c_declarations = check_c_declarations_pxd
_specific_post_parse = PxdPostParse(self)
else:
_check_c_declarations = check_c_declarations
_specific_post_parse = None
if py and not pxd:
_align_function_definitions = AlignFunctionDefinitions(self)
else:
_align_function_definitions = None
return [
NormalizeTree(self),
PostParse(self),
_specific_post_parse,
InterpretCompilerDirectives(self, self.compiler_directives),
ParallelRangeTransform(self),
AdjustDefByDirectives(self),
MarkClosureVisitor(self),
_align_function_definitions,
RemoveUnreachableCode(self),
ConstantFolding(),
FlattenInListTransform(),
WithTransform(self),
DecoratorTransform(self),
ForwardDeclareTypes(self),
AnalyseDeclarationsTransform(self),
AutoTestDictTransform(self),
EmbedSignature(self),
EarlyReplaceBuiltinCalls(self), ## Necessary?
TransformBuiltinMethods(self), ## Necessary?
CreateControlFlowGraph(self),
RemoveUnreachableCode(self),
MarkAssignments(self),
MarkOverflowingArithmetic(self),
IntroduceBufferAuxiliaryVars(self),
_check_c_declarations,
AnalyseExpressionsTransform(self),
CreateClosureClasses(self), ## After all lookups and type inference
ExpandInplaceOperators(self),
OptimizeBuiltinCalls(self), ## Necessary?
IterationTransform(),
SwitchTransform(),
DropRefcountingTransform(),
FinalOptimizePhase(self),
GilCheck(),
]
def create_pyx_pipeline(self, options, result, py=False):
def generate_pyx_code(module_node):
module_node.process_implementation(options, result)
result.compilation_source = module_node.compilation_source
return result
def inject_pxd_code(module_node):
from textwrap import dedent
stats = module_node.body.stats
for name, (statlistnode, scope) in self.pxds.iteritems():
# Copy over function nodes to the module
# (this seems strange -- I believe the right concept is to split
# ModuleNode into a ModuleNode and a CodeGenerator, and tell that
# CodeGenerator to generate code both from the pyx and pxd ModuleNodes.)
stats.append(statlistnode)
# Until utility code is moved to code generation phase everywhere,
# we need to copy it over to the main scope
module_node.scope.utility_code_list.extend(scope.utility_code_list)
return module_node
test_support = []
if options.evaluate_tree_assertions:
from Cython.TestUtils import TreeAssertVisitor
test_support.append(TreeAssertVisitor())
if options.gdb_debug:
from Cython.Debugger import DebugWriter
from ParseTreeTransforms import DebugTransform
self.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
options.output_dir)
debug_transform = [DebugTransform(self, options, result)]
else:
debug_transform = []
return list(itertools.chain(
[create_parse(self)],
self.create_pipeline(pxd=False, py=py),
test_support,
[inject_pxd_code, abort_on_errors],
debug_transform,
[generate_pyx_code]))
def create_pxd_pipeline(self, scope, module_name):
def parse_pxd(source_desc):
tree = self.parse(source_desc, scope, pxd=True,
full_module_name=module_name)
tree.scope = scope
tree.is_pxd = True
return tree
from CodeGeneration import ExtractPxdCode
# The pxd pipeline ends up with a CCodeWriter containing the
# code of the pxd, as well as a pxd scope.
return [parse_pxd] + self.create_pipeline(pxd=True) + [
ExtractPxdCode(self),
]
def create_py_pipeline(self, options, result):
return self.create_pyx_pipeline(options, result, py=True)
def create_pyx_as_pxd_pipeline(self, source):
from ParseTreeTransforms import (AlignFunctionDefinitions,
MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform)
from Optimize import ConstantFolding, FlattenInListTransform
from Nodes import StatListNode
pipeline = []
result = create_default_resultobj(source, self.options)
pyx_pipeline = self.create_pyx_pipeline(self.options, result)
for stage in pyx_pipeline:
if stage.__class__ in [
AlignFunctionDefinitions,
MarkClosureVisitor,
ConstantFolding,
FlattenInListTransform,
WithTransform,
]:
# Skip these unnecessary stages.
continue
pipeline.append(stage)
if isinstance(stage, AnalyseDeclarationsTransform):
# This is the last stage we need.
break
def fake_pxd(root):
for entry in root.scope.entries.values():
entry.defined_in_pxd = 1
return StatListNode(root.pos, stats=[]), root.scope
pipeline.append(fake_pxd)
return pipeline
# pipeline creation functions can now be found in Pipeline.py
def process_pxd(self, source_desc, scope, module_name):
import Pipeline
if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
source = CompilationSource(source_desc, module_name, os.getcwd())
pipeline = self.create_pyx_as_pxd_pipeline(source)
result = self.run_pipeline(pipeline, source)
result_sink = create_default_resultobj(source, self.options)
pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink)
result = Pipeline.run_pipeline(pipeline, source)
else:
pipeline = self.create_pxd_pipeline(scope, module_name)
result = self.run_pipeline(pipeline, source_desc)
pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name)
result = Pipeline.run_pipeline(pipeline, source_desc)
return result
def nonfatal_error(self, exc):
return Errors.report_error(exc)
def run_pipeline(self, pipeline, source):
error = None
data = source
try:
try:
for phase in pipeline:
if phase is not None:
if DebugFlags.debug_verbose_pipeline:
t = time()
print "Entering pipeline phase %r" % phase
data = phase(data)
if DebugFlags.debug_verbose_pipeline:
print " %.3f seconds" % (time() - t)
except CompileError, err:
# err is set
Errors.report_error(err)
error = err
except InternalError, err:
# Only raise if there was not an earlier error
if Errors.num_errors == 0:
raise
error = err
except AbortError, err:
error = err
return (error, data)
def find_module(self, module_name,
relative_to = None, pos = None, need_pxd = 1):
# Finds and returns the module scope corresponding to
@@ -604,21 +410,6 @@ class Context(object):
pass
result.c_file = None
def create_parse(context):
def parse(compsrc):
source_desc = compsrc.source_desc
full_module_name = compsrc.full_module_name
initial_pos = (source_desc, 1, 0)
saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
Options.cimport_from_pyx = saved_cimport_from_pyx
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
tree.compilation_source = compsrc
tree.scope = scope
tree.is_pxd = False
return tree
return parse
def create_default_resultobj(compilation_source, options):
result = CompilationResult()
result.main_source_file = compilation_source.source_desc.filename
@@ -635,6 +426,7 @@ def create_default_resultobj(compilation_source, options):
return result
def run_pipeline(source, options, full_module_name = None):
import Pipeline
# Set up context
context = options.create_context()
@@ -665,12 +457,12 @@ def run_pipeline(source, options, full_module_name = None):
# Get pipeline
if source_ext.lower() == '.py' or not source_ext:
pipeline = context.create_py_pipeline(options, result)
pipeline = Pipeline.create_py_pipeline(context, options, result)
else:
pipeline = context.create_pyx_pipeline(options, result)
pipeline = Pipeline.create_pyx_pipeline(context, options, result)
context.setup_errors(options, result)
err, enddata = context.run_pipeline(pipeline, source)
err, enddata = Pipeline.run_pipeline(pipeline, source)
context.teardown_errors(err, options, result)
return result
......
@@ -52,6 +52,34 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
child_attrs = ["body"]
directives = None
def merge_in(self, tree, scope, merge_scope=False):
# Merges in the contents of another tree, and possibly scope. With the
# current implementation below, this must be done right prior
# to code generation.
#
# Note: This way of doing it seems strange -- I believe the
# right concept is to split ModuleNode into a ModuleNode and a
# CodeGenerator, and tell that CodeGenerator to generate code
# from multiple sources.
assert isinstance(self.body, Nodes.StatListNode)
if isinstance(tree, Nodes.StatListNode):
self.body.stats.extend(tree.stats)
else:
self.body.stats.append(tree)
selfscope = self.scope
selfscope.utility_code_list.extend(scope.utility_code_list)
if merge_scope:
selfscope.entries.update(scope.entries)
for x in ('const_entries',
'type_entries',
'sue_entries',
'arg_entries',
'var_entries',
'pyfunc_entries',
'cfunc_entries',
'c_class_entries'):
getattr(selfscope, x).extend(getattr(scope, x))
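# Illustration (taken from the new Pipeline.py below): the pxd-injection stage
# calls
#     module_node.merge_in(statlistnode, scope)
# once per entry in context.pxds, so each pxd's statements and utility code
# end up in the main module right before code generation.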
def analyse_declarations(self, env):
if Options.embed_pos_in_docstring:
env.doc = EncodedString(u'File: %s (starting at line %s)' % Nodes.relative_position(self.pos))
......
import itertools
from time import time
import Errors
import DebugFlags
import Options
from Errors import PyrexError, CompileError, InternalError, AbortError, error
#
# Really small pipeline stages
#
def dumptree(t):
# For quick debugging in pipelines
print t.dump()
return t
def abort_on_errors(node):
# Stop the pipeline if there are any errors.
if Errors.num_errors != 0:
raise AbortError, "pipeline break"
return node
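# A pipeline phase is simply a callable that takes the current pipeline data
# (usually the parse tree) and returns it, possibly transformed or replaced.
# The stage below is a hypothetical illustration of that protocol only -- it
# is not part of this commit:
def count_toplevel_stats(node):
    # Report the size of the module body and pass the tree through unchanged.
    print "module body has %d top-level statements" % len(node.body.stats)
    return node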
def parse_stage_factory(context):
def parse(compsrc):
source_desc = compsrc.source_desc
full_module_name = compsrc.full_module_name
initial_pos = (source_desc, 1, 0)
saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0)
Options.cimport_from_pyx = saved_cimport_from_pyx
tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
tree.compilation_source = compsrc
tree.scope = scope
tree.is_pxd = False
return tree
return parse
def parse_pxd_stage_factory(context, scope, module_name):
def parse(source_desc):
tree = context.parse(source_desc, scope, pxd=True,
full_module_name=module_name)
tree.scope = scope
tree.is_pxd = True
return tree
return parse
def generate_pyx_code_stage_factory(options, result):
def generate_pyx_code_stage(module_node):
module_node.process_implementation(options, result)
result.compilation_source = module_node.compilation_source
return result
return generate_pyx_code_stage
def inject_pxd_code_stage_factory(context):
def inject_pxd_code_stage(module_node):
from textwrap import dedent
stats = module_node.body.stats
for name, (statlistnode, scope) in context.pxds.iteritems():
module_node.merge_in(statlistnode, scope)
return module_node
return inject_pxd_code_stage
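# Each *_stage_factory above follows the same pattern: it closes over the
# context (and any options or result objects) and returns a one-argument
# stage, so the pipeline runner only ever calls stage(data). A hypothetical
# factory, for illustration only:
def announce_module_stage_factory(full_module_name):
    def announce_module_stage(tree):
        # The closed-over module name is available without threading it
        # through the pipeline data.
        print "compiling %s" % full_module_name
        return tree
    return announce_module_stage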
#
# Pipeline factories
#
def create_pipeline(context, mode):
assert mode in ('pyx', 'py', 'pxd')
from Visitor import PrintTree
from ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
from ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform
from ParseTreeTransforms import AnalyseExpressionsTransform
from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
from ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
from TypeInference import MarkAssignments, MarkOverflowingArithmetic
from ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
from ParseTreeTransforms import RemoveUnreachableCode, GilCheck
from FlowControl import CreateControlFlowGraph
from AnalysedTreeTransforms import AutoTestDictTransform
from AutoDocTransforms import EmbedSignature
from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
from Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
from Optimize import ConstantFolding, FinalOptimizePhase
from Optimize import DropRefcountingTransform
from Buffer import IntroduceBufferAuxiliaryVars
from ModuleNode import check_c_declarations, check_c_declarations_pxd
if mode == 'pxd':
_check_c_declarations = check_c_declarations_pxd
_specific_post_parse = PxdPostParse(context)
else:
_check_c_declarations = check_c_declarations
_specific_post_parse = None
if mode == 'py':
_align_function_definitions = AlignFunctionDefinitions(context)
else:
_align_function_definitions = None
return [
NormalizeTree(context),
PostParse(context),
_specific_post_parse,
InterpretCompilerDirectives(context, context.compiler_directives),
ParallelRangeTransform(context),
AdjustDefByDirectives(context),
MarkClosureVisitor(context),
_align_function_definitions,
RemoveUnreachableCode(context),
ConstantFolding(),
FlattenInListTransform(),
WithTransform(context),
DecoratorTransform(context),
ForwardDeclareTypes(context),
AnalyseDeclarationsTransform(context),
AutoTestDictTransform(context),
EmbedSignature(context),
EarlyReplaceBuiltinCalls(context), ## Necessary?
TransformBuiltinMethods(context), ## Necessary?
CreateControlFlowGraph(context),
RemoveUnreachableCode(context),
MarkAssignments(context),
MarkOverflowingArithmetic(context),
IntroduceBufferAuxiliaryVars(context),
_check_c_declarations,
AnalyseExpressionsTransform(context),
CreateClosureClasses(context), ## After all lookups and type inference
ExpandInplaceOperators(context),
OptimizeBuiltinCalls(context), ## Necessary?
IterationTransform(),
SwitchTransform(),
DropRefcountingTransform(),
FinalOptimizePhase(context),
GilCheck(),
]
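# Note that the list above may contain None placeholders (e.g.
# _specific_post_parse and _align_function_definitions in 'pyx' mode);
# run_pipeline() below, and external callers such as the updated
# Cython/Build/Inline.py, are expected to skip those. Sketch of direct use,
# modelled on the Inline.py change in this commit:
#
#     for phase in Pipeline.create_pipeline(context, 'pyx'):
#         if phase is None:
#             continue
#         tree = phase(tree)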
def create_pyx_pipeline(context, options, result, py=False):
if py:
mode = 'py'
else:
mode = 'pyx'
test_support = []
if options.evaluate_tree_assertions:
from Cython.TestUtils import TreeAssertVisitor
test_support.append(TreeAssertVisitor())
if options.gdb_debug:
from Cython.Debugger import DebugWriter
from ParseTreeTransforms import DebugTransform
context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
options.output_dir)
debug_transform = [DebugTransform(context, options, result)]
else:
debug_transform = []
return list(itertools.chain(
[parse_stage_factory(context)],
create_pipeline(context, mode),
test_support,
[inject_pxd_code_stage_factory(context),
abort_on_errors],
debug_transform,
[generate_pyx_code_stage_factory(options, result)]))
def create_pxd_pipeline(context, scope, module_name):
from CodeGeneration import ExtractPxdCode
# The pxd pipeline ends up with a CCodeWriter containing the
# code of the pxd, as well as a pxd scope.
return [
parse_pxd_stage_factory(context, scope, module_name)
] + create_pipeline(context, 'pxd') + [
ExtractPxdCode(context)
]
def create_py_pipeline(context, options, result):
return create_pyx_pipeline(context, options, result, py=True)
def create_pyx_as_pxd_pipeline(context, result):
from ParseTreeTransforms import (AlignFunctionDefinitions,
MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform)
from Optimize import ConstantFolding, FlattenInListTransform
from Nodes import StatListNode
pipeline = []
pyx_pipeline = create_pyx_pipeline(context, context.options, result)
for stage in pyx_pipeline:
if stage.__class__ in [
AlignFunctionDefinitions,
MarkClosureVisitor,
ConstantFolding,
FlattenInListTransform,
WithTransform,
]:
# Skip these unnecessary stages.
continue
pipeline.append(stage)
if isinstance(stage, AnalyseDeclarationsTransform):
# This is the last stage we need.
break
def fake_pxd(root):
for entry in root.scope.entries.values():
entry.defined_in_pxd = 1
return StatListNode(root.pos, stats=[]), root.scope
pipeline.append(fake_pxd)
return pipeline
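# This pipeline is used from Context.process_pxd() in Main.py when a .pyx
# file is cimported in place of a .pxd: the pyx pipeline is reused up to
# AnalyseDeclarationsTransform, and fake_pxd then marks every entry as
# defined_in_pxd and yields a (tree, scope) pair of the same shape as the
# entries stored in context.pxds.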
#
# Running a pipeline
#
def run_pipeline(pipeline, source):
error = None
data = source
try:
try:
for phase in pipeline:
if phase is not None:
if DebugFlags.debug_verbose_pipeline:
t = time()
print "Entering pipeline phase %r" % phase
data = phase(data)
if DebugFlags.debug_verbose_pipeline:
print " %.3f seconds" % (time() - t)
except CompileError, err:
# err is set
Errors.report_error(err)
error = err
except InternalError, err:
# Only raise if there was not an earlier error
if Errors.num_errors == 0:
raise
error = err
except AbortError, err:
error = err
return (error, data)
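# Typical use, as in Main.run_pipeline() after this commit: error is None on
# success, and data is whatever the last stage returned (the CompilationResult
# for a pyx pipeline). A minimal sketch, assuming context, options, result and
# source are already set up:
#
#     pipeline = create_pyx_pipeline(context, options, result)
#     err, enddata = run_pipeline(pipeline, source)
#     context.teardown_errors(err, options, result)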