Commit 544f9242 authored by Robert Bradshaw

Merge package and compiler repositories.

parents bb088c2a f2fdd6bc
# Note: Work in progress
import os
import re
import time
from StringIO import StringIO
import Version
from Code import CCodeWriter
# need one-character substitutions (for now) so offsets aren't off
special_chars = [('<', '\xF0', '&lt;'),
('>', '\xF1', '&gt;'),
('&', '\xF2', '&amp;')]
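# Each HTML-unsafe character is first swapped for a one-byte placeholder (so the
# annotation column offsets stay valid) and only turned into its entity when the
# final HTML is written out, e.g. '<' -> '\xF0' while inserting markup, then
# '\xF0' -> '&lt;' in the output.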
class AnnotationCCodeWriter(CCodeWriter):
def __init__(self, f):
CCodeWriter.__init__(self, self)
self.buffer = StringIO()
self.real_f = f
self.annotations = []
self.last_pos = None
self.code = {}
def getvalue(self):
return self.real_f.getvalue()
def write(self, s):
self.real_f.write(s)
self.buffer.write(s)
def mark_pos(self, pos):
# if pos is not None:
# CCodeWriter.mark_pos(self, pos)
# return
if self.last_pos:
try:
code = self.code[self.last_pos[1]]
except KeyError:
code = ""
self.code[self.last_pos[1]] = code + self.buffer.getvalue()
self.buffer = StringIO()
self.last_pos = pos
def annotate(self, pos, item):
self.annotations.append((pos, item))
def save_annotation(self, filename):
self.mark_pos(None)
f = open(filename)
lines = f.readlines()
for k in range(len(lines)):
line = lines[k]
for c, cc, html in special_chars:
line = line.replace(c, cc)
lines[k] = line
f.close()
all = []
for pos, item in self.annotations:
if pos[0] == filename:
start = item.start()
size, end = item.end()
if size:
all.append((pos, start))
all.append(((filename, pos[1], pos[2]+size), end))
else:
all.append((pos, start+end))
all.sort()
all.reverse()
for pos, item in all:
_, line_no, col = pos
line_no -= 1
col += 1
line = lines[line_no]
lines[line_no] = line[:col] + item + line[col:]
f = open("%s.html" % filename, "w")
f.write('<html>\n')
f.write("""
<head>
<style type="text/css">
body { font-family: courier; font-size: 12; }
.code { font-size: 9; color: #444444; display: none; margin-left: 20px; }
.py_api { color: red; }
.pyx_api { color: #FF3000; }
.py_macro_api { color: #FF8000; }
.error_goto { color: #FF8000; }
.tag { }
.coerce { color: #008000; border: 1px dotted #008000 }
.py_attr { color: #FF0000; font-weight: bold; }
.c_attr { color: #0000FF; }
.py_call { color: #FF0000; font-weight: bold; }
.c_call { color: #0000FF; }
.line { margin: 0em }
</style>
<script>
function toggleDiv(id) {
theDiv = document.getElementById(id);
if (theDiv.style.display == 'none') theDiv.style.display = 'block';
else theDiv.style.display = 'none';
}
</script>
</head>
""")
f.write('<body>\n')
f.write('<p>Generated by Cython %s on %s\n' % (Version.version, time.asctime()))
c_file = os.path.basename(filename)[:-3] + 'c'
f.write('<p>Raw output: <a href="%s">%s</a>\n' % (c_file, c_file))
k = 0
py_c_api = re.compile('(Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]+)')
pyx_api = re.compile('(__Pyx[A-Za-z_]+)\(')
py_macro_api = re.compile('(Py[A-Za-z]*_[A-Z][A-Z_]+)')
error_goto = re.compile(r'((; *if .*)? \{__pyx_filename = .*goto __pyx_L\w+;\})')
for line in lines:
k += 1
try:
code = self.code[k]
except KeyError:
code = ''
code, c_api_calls = py_c_api.subn(r"<span class='py_api'>\1</span>", code)
code, pyx_api_calls = pyx_api.subn(r"<span class='pyx_api'>\1</span>(", code)
code, macro_api_calls = py_macro_api.subn(r"<span class='py_macro_api'>\1</span>", code)
code, error_goto_calls = error_goto.subn(r"<span class='error_goto'>\1</span>", code)
code = code.replace("<span class='error_goto'>;", ";<span class='error_goto'>")
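# Rough "C-API weight" of the line: full Python C-API calls count 5, __Pyx
# helper calls 2 and C-API macros 1; heavier lines get a more saturated
# yellow background below.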
color = "FFFF%02x" % int(255/(1+(5*c_api_calls+2*pyx_api_calls+macro_api_calls)/10.0))
f.write("<pre class='line' style='background-color: #%s' onclick='toggleDiv(\"line%s\")'>" % (color, k))
f.write(" %d: " % k)
for c, cc, html in special_chars:
line = line.replace(cc, html)
f.write(line.rstrip())
f.write('</pre>\n')
f.write("<pre id='line%s' class='code' style='background-color: #%s'>%s</pre>" % (k, color, code))
f.write('</body></html>\n')
f.close()
# TODO: make this cleaner
def escape(raw_string):
raw_string = raw_string.replace("\'", r"&#146;")
raw_string = raw_string.replace('\"', r'&quot;')
raw_string = raw_string.replace('\n', r'<br>\n')
raw_string = raw_string.replace('\t', r'\t')
return raw_string
class AnnotationItem:
def __init__(self, style, text, tag="", size=0):
self.style = style
self.text = text
self.tag = tag
self.size = size
def start(self):
return "<span class='tag %s' title='%s'>%s" % (self.style, self.text, self.tag)
def end(self):
return self.size, "</span>"
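# A minimal usage sketch of the classes above (the file name is illustrative;
# normally ModuleNode drives this while generating C code):
#
#   out = AnnotationCCodeWriter(StringIO())
#   out.mark_pos(("example.pyx", 1, 0))    # attribute following output to line 1
#   out.annotate(("example.pyx", 1, 0),
#       AnnotationItem("py_call", "Python C-API call", tag="f", size=1))
#   out.putln('f();')
#   out.save_annotation("example.pyx")     # writes example.pyx.html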
#
# Pyrex - Builtin Definitions
#
from Symtab import BuiltinScope
from TypeSlots import Signature
builtin_function_table = [
# name, args, return, C API func, py equiv = "*"
('abs', "O", "O", "PyNumber_Absolute"),
#('chr', "", "", ""),
#('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
#('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
('delattr', "OO", "r", "PyObject_DelAttr"),
('dir', "O", "O", "PyObject_Dir"),
('divmod', "OO", "O", "PyNumber_Divmod"),
#('eval', "", "", ""),
#('execfile', "", "", ""),
#('filter', "", "", ""),
('getattr', "OO", "O", "PyObject_GetAttr"),
('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr"),
('hasattr', "OO", "b", "PyObject_HasAttr"),
('hash', "O", "l", "PyObject_Hash"),
#('hex', "", "", ""),
#('id', "", "", ""),
#('input', "", "", ""),
('intern', "s", "O", "PyString_InternFromString"),
('isinstance', "OO", "b", "PyObject_IsInstance"),
('issubclass', "OO", "b", "PyObject_IsSubclass"),
('iter', "O", "O", "PyObject_GetIter"),
('len', "O", "Z", "PyObject_Length"),
#('map', "", "", ""),
#('max', "", "", ""),
#('min', "", "", ""),
#('oct', "", "", ""),
# Not worth doing open, when second argument would become mandatory
#('open', "ss", "O", "PyFile_FromString"),
#('ord', "", "", ""),
('pow', "OOO", "O", "PyNumber_Power"),
#('range', "", "", ""),
#('raw_input', "", "", ""),
#('reduce', "", "", ""),
('reload', "O", "O", "PyImport_ReloadModule"),
('repr', "O", "O", "PyObject_Repr"),
#('round', "", "", ""),
('setattr', "OOO", "r", "PyObject_SetAttr"),
#('sum', "", "", ""),
#('unichr', "", "", ""),
#('unicode', "", "", ""),
#('vars', "", "", ""),
#('zip', "", "", ""),
# Can't do these easily until we have builtin type entries.
#('typecheck', "OO", "i", "PyObject_TypeCheck", False),
#('issubtype', "OO", "i", "PyType_IsSubtype", False),
]
# Builtin types
# bool
# buffer
# classmethod
# dict
# enumerate
# file
# float
# int
# list
# long
# object
# property
# slice
# staticmethod
# super
# str
# tuple
# type
# xrange
getattr3_utility_code = ["""
static PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/
""","""
static PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
PyObject *r = PyObject_GetAttr(o, n);
if (!r) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError))
goto bad;
PyErr_Clear();
r = d;
Py_INCREF(d);
}
return r;
bad:
return 0;
}
"""]
builtin_utility_code = {
'getattr3': getattr3_utility_code,
}
builtin_scope = BuiltinScope()
def declare_builtin_func(name, args, ret, cname, py_equiv = "*"):
sig = Signature(args, ret)
type = sig.function_type()
utility = builtin_utility_code.get(name)
builtin_scope.declare_builtin_cfunction(name, type, cname, py_equiv, utility)
def init_builtin_funcs():
for desc in builtin_function_table:
declare_builtin_func(*desc)
def init_builtins():
init_builtin_funcs()
init_builtins()
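# For illustration, the table row ('getattr', "OO", "O", "PyObject_GetAttr")
# is expanded by declare_builtin_func above into roughly:
#
#   sig = Signature("OO", "O")           # two object arguments, object result
#   type = sig.function_type()
#   builtin_scope.declare_builtin_cfunction('getattr', type,
#                                           'PyObject_GetAttr', '*', None)
#
# (the last two arguments are the Python-level equivalent and the optional
# utility code, which only 'getattr3' supplies here).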
#
# Cython - Command Line Parsing
#
import sys
import Options
import Transform
usage = """\
Cython (http://cython.org) is a compiler for code written in the
Cython language. Cython is based on Pyrex by Greg Ewing.
Usage: cython [options] sourcefile.pyx ...
Options:
-v, --version Display version number of cython compiler
-l, --create-listing Write error messages to a listing file
-I, --include-dir <directory> Search for include files in named directory
(multiple include directories are allowed).
-o, --output-file <filename> Specify name of generated C file
-p, --embed-positions If specified, the position of each function definition in
the Cython file is embedded in its docstring.
-z, --pre-import <module> If specified, assume undeclared names in this
module. Emulates the behavior of putting
"from <module> import *" at the top of the file.
--incref-local-binop Force an extra incref on local variables before
performing any binary operations.
--cleanup <level> Release interned objects on python exit, for memory debugging.
Level indicates aggressiveness, default 0 releases nothing.
-D, --no-docstrings Remove docstrings.
-a, --annotate Produce a colorized version of the source.
--convert-range Convert for loops using range() function to for...from loops.
--cplus Output a C++ file rather than a C file.
"""
#The following experimental options are supported only on MacOSX:
# -C, --compile Compile generated .c file to .o file
# -X, --link Link .o file to produce extension module (implies -C)
# -+, --cplus Use C++ compiler for compiling and linking
# Additional .o files to link may be supplied when using -X."""
#The following options are very experimental and are used for plugging in code
#into different transform stages.
# -T phase:factory At the phase given, hand off the tree to the transform returned
# when calling factory without arguments. Factory should be fully
# specified (i.e. Module.SubModule.factory) and the containing module
# will be imported. This option can be repeated to add more transforms,
# transforms for the same phase will be used in the order they are given.
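# A hypothetical factory module for -T (module, function and phase names below
# are purely illustrative; the valid phase names are those in Transform.PHASES):
#
#    # mypkg/transforms.py
#    def dump_tree_factory():
#        def dump_tree(tree):
#            print tree          # inspect (or rewrite) the tree, then return it
#            return tree
#        return dump_tree
#
# invoked as: cython -T some_phase:mypkg.transforms.dump_tree_factory foo.pyx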
def bad_usage():
print >>sys.stderr, usage
sys.exit(1)
def parse_command_line(args):
def parse_add_transform(transforms, param):
def import_symbol(fqn):
modsplitpt = fqn.rfind(".")
if modsplitpt == -1: bad_usage()
modulename = fqn[:modsplitpt]
symbolname = fqn[modsplitpt+1:]
module = __import__(modulename, globals(), locals(), [symbolname])
return getattr(module, symbolname)
stagename, factoryname = param.split(":")
if not stagename in Transform.PHASES:
bad_usage()
factory = import_symbol(factoryname)
transform = factory()
transforms[stagename].append(transform)
from Cython.Compiler.Main import \
CompilationOptions, default_options
def pop_arg():
if args:
return args.pop(0)
else:
bad_usage()
def get_param(option):
tail = option[2:]
if tail:
return tail
else:
return pop_arg()
options = CompilationOptions(default_options)
sources = []
while args:
if args[0].startswith("-"):
option = pop_arg()
if option in ("-v", "--version"):
options.show_version = 1
elif option in ("-l", "--create-listing"):
options.use_listing_file = 1
elif option in ("-C", "--compile"):
options.c_only = 0
elif option in ("-X", "--link"):
options.c_only = 0
options.obj_only = 0
elif option in ("-+", "--cplus"):
options.cplus = 1
elif option.startswith("-I"):
options.include_path.append(get_param(option))
elif option == "--include-dir":
options.include_path.append(pop_arg())
elif option in ("-o", "--output-file"):
options.output_file = pop_arg()
elif option in ("-p", "--embed-positions"):
Options.embed_pos_in_docstring = 1
elif option in ("-z", "--pre-import"):
Options.pre_import = pop_arg()
elif option == "--incref-local-binop":
Options.incref_local_binop = 1
elif option == "--cleanup":
Options.generate_cleanup_code = int(pop_arg())
elif option in ("-D", "--no-docstrings"):
Options.docstrings = False
elif option in ("-a", "--annotate"):
Options.annotate = True
elif option == "--convert-range":
Options.convert_range = True
elif option.startswith("-T"):
parse_add_transform(options.transforms, get_param(option))
# Note: this can occur multiple times, each time appends
else:
bad_usage()
else:
arg = pop_arg()
if arg.endswith(".pyx"):
sources.append(arg)
elif arg.endswith(".o"):
options.objects.append(arg)
else:
print >>sys.stderr, \
"cython: %s: Unknown filename suffix" % arg
if options.objects and len(sources) > 1:
print >>sys.stderr, \
"cython: Only one source file allowed together with .o files"
if options.use_listing_file and len(sources) > 1:
print >>sys.stderr, \
"cython: Only one source file allowed when using a listing file (-l)"
sys.exit(1)
if len(sources) == 0 and not options.show_version:
bad_usage()
return options, sources
#
# Pyrex - Code output module
#
import Naming
import Options
from Cython.Utils import open_new_file
from PyrexTypes import py_object_type, typecast
from TypeSlots import method_coexist
class CCodeWriter:
# f file output file
# level int indentation level
# bol bool beginning of line?
# marker string comment to emit before next line
# return_label string function return point label
# error_label string error catch point label
# continue_label string loop continue point label
# break_label string loop break point label
# label_counter integer counter for naming labels
# in_try_finally boolean inside try of try...finally
# filename_table {string : int} for finding filename table indexes
# filename_list [string] filenames in filename table order
# exc_vars (string * 3) exception variables for reraise, or None
# input_file_contents dict contents (=list of lines) of any file that was used as input
# to create this output C code. This is
# used to annotate the comments.
in_try_finally = 0
def __init__(self, f):
#self.f = open_new_file(outfile_name)
self.f = f
self.level = 0
self.bol = 1
self.marker = None
self.last_marker = 1
self.label_counter = 1
self.error_label = None
self.filename_table = {}
self.filename_list = []
self.exc_vars = None
self.input_file_contents = {}
def putln(self, code = ""):
if self.marker and self.bol:
self.emit_marker()
if code:
self.put(code)
self.f.write("\n")
self.bol = 1
def emit_marker(self):
self.f.write("\n")
self.indent()
self.f.write("/* %s */\n" % self.marker)
self.last_marker = self.marker
self.marker = None
def put(self, code):
dl = code.count("{") - code.count("}")
if dl < 0:
self.level += dl
if self.bol:
self.indent()
self.f.write(code)
self.bol = 0
if dl > 0:
self.level += dl
def increase_indent(self):
self.level = self.level + 1
def decrease_indent(self):
self.level = self.level - 1
def begin_block(self):
self.putln("{")
self.increase_indent()
def end_block(self):
self.decrease_indent()
self.putln("}")
def indent(self):
self.f.write(" " * self.level)
def file_contents(self, file):
try:
return self.input_file_contents[file]
except KeyError:
F = [line.replace('*/', '*[inserted by cython to avoid comment closer]/')
for line in open(file).readlines()]
self.input_file_contents[file] = F
return F
def get_py_version_hex(self, pyversion):
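# e.g. (2, 5) -> "0x02050000", (2, 4, 1) -> "0x02040100"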
return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4]
def mark_pos(self, pos):
if pos is None:
return
file, line, col = pos
contents = self.file_contents(file)
context = ''
for i in range(max(0,line-3), min(line+2, len(contents))):
s = contents[i]
if i+1 == line: # line numbers in pyrex start counting up from 1
s = s.rstrip() + ' # <<<<<<<<<<<<<< ' + '\n'
context += " * " + s
marker = '"%s":%s\n%s' % (file, line, context)
if self.last_marker != marker:
self.marker = marker
def init_labels(self):
self.label_counter = 0
self.labels_used = {}
self.return_label = self.new_label()
self.new_error_label()
self.continue_label = None
self.break_label = None
def new_label(self):
n = self.label_counter
self.label_counter = n + 1
return "%s%d" % (Naming.label_prefix, n)
def new_error_label(self):
old_err_lbl = self.error_label
self.error_label = self.new_label()
return old_err_lbl
def get_loop_labels(self):
return (
self.continue_label,
self.break_label)
def set_loop_labels(self, labels):
(self.continue_label,
self.break_label) = labels
def new_loop_labels(self):
old_labels = self.get_loop_labels()
self.set_loop_labels(
(self.new_label(),
self.new_label()))
return old_labels
def get_all_labels(self):
return (
self.continue_label,
self.break_label,
self.return_label,
self.error_label)
def set_all_labels(self, labels):
(self.continue_label,
self.break_label,
self.return_label,
self.error_label) = labels
def all_new_labels(self):
old_labels = self.get_all_labels()
new_labels = []
for old_label in old_labels:
if old_label:
new_labels.append(self.new_label())
else:
new_labels.append(old_label)
self.set_all_labels(new_labels)
return old_labels
def use_label(self, lbl):
self.labels_used[lbl] = 1
def label_used(self, lbl):
return lbl in self.labels_used
def put_label(self, lbl):
if lbl in self.labels_used:
self.putln("%s:;" % lbl)
def put_goto(self, lbl):
self.use_label(lbl)
self.putln("goto %s;" % lbl)
def put_var_declarations(self, entries, static = 0, dll_linkage = None,
definition = True):
for entry in entries:
if not entry.in_cinclude:
self.put_var_declaration(entry, static, dll_linkage, definition)
def put_var_declaration(self, entry, static = 0, dll_linkage = None,
definition = True):
#print "Code.put_var_declaration:", entry.name, "definition =", definition ###
visibility = entry.visibility
if visibility == 'private' and not definition:
#print "...private and not definition, skipping" ###
return
if not entry.used and visibility == "private":
#print "not used and private, skipping" ###
return
storage_class = ""
if visibility == 'extern':
storage_class = Naming.extern_c_macro
elif visibility == 'public':
if not definition:
storage_class = Naming.extern_c_macro
elif visibility == 'private':
if static:
storage_class = "static"
if storage_class:
self.put("%s " % storage_class)
if visibility != 'public':
dll_linkage = None
self.put(entry.type.declaration_code(entry.cname,
dll_linkage = dll_linkage))
if entry.init is not None:
self.put(" = %s" % entry.type.literal_code(entry.init))
self.putln(";")
def entry_as_pyobject(self, entry):
type = entry.type
if (not entry.is_self_arg and not entry.type.is_complete()) \
or (entry.type.is_extension_type and entry.type.base_type):
return "(PyObject *)" + entry.cname
else:
return entry.cname
def as_pyobject(self, cname, type):
return typecast(py_object_type, type, cname)
def put_incref(self, cname, type):
self.putln("Py_INCREF(%s);" % self.as_pyobject(cname, type))
def put_decref(self, cname, type):
self.putln("Py_DECREF(%s);" % self.as_pyobject(cname, type))
def put_var_incref(self, entry):
if entry.type.is_pyobject:
self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry))
def put_decref_clear(self, cname, type):
self.putln("Py_DECREF(%s); %s = 0;" % (
typecast(py_object_type, type, cname), cname))
#self.as_pyobject(cname, type), cname))
def put_xdecref(self, cname, type):
self.putln("Py_XDECREF(%s);" % self.as_pyobject(cname, type))
def put_xdecref_clear(self, cname, type):
self.putln("Py_XDECREF(%s); %s = 0;" % (
self.as_pyobject(cname, type), cname))
def put_var_decref(self, entry):
if entry.type.is_pyobject:
if entry.init_to_none is False:
self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))
else:
self.putln("Py_DECREF(%s);" % self.entry_as_pyobject(entry))
def put_var_decref_clear(self, entry):
if entry.type.is_pyobject:
self.putln("Py_DECREF(%s); %s = 0;" % (
self.entry_as_pyobject(entry), entry.cname))
def put_var_xdecref(self, entry):
if entry.type.is_pyobject:
self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))
def put_var_xdecref_clear(self, entry):
if entry.type.is_pyobject:
self.putln("Py_XDECREF(%s); %s = 0;" % (
self.entry_as_pyobject(entry), entry.cname))
def put_var_decrefs(self, entries, used_only = 0):
for entry in entries:
if not used_only or entry.used:
if entry.xdecref_cleanup:
self.put_var_xdecref(entry)
else:
self.put_var_decref(entry)
def put_var_xdecrefs(self, entries):
for entry in entries:
self.put_var_xdecref(entry)
def put_var_xdecrefs_clear(self, entries):
for entry in entries:
self.put_var_xdecref_clear(entry)
def put_init_to_py_none(self, cname, type):
py_none = typecast(type, py_object_type, "Py_None")
self.putln("%s = %s; Py_INCREF(Py_None);" % (cname, py_none))
def put_init_var_to_py_none(self, entry, template = "%s"):
code = template % entry.cname
#if entry.type.is_extension_type:
# code = "((PyObject*)%s)" % code
self.put_init_to_py_none(code, entry.type)
def put_pymethoddef(self, entry, term):
if entry.doc:
doc_code = entry.doc_cname
else:
doc_code = 0
method_flags = entry.signature.method_flags()
if method_flags:
if entry.is_special:
method_flags += [method_coexist]
self.putln(
'{"%s", (PyCFunction)%s, %s, %s}%s' % (
entry.name,
entry.func_cname,
"|".join(method_flags),
doc_code,
term))
def put_error_if_neg(self, pos, value):
# return self.putln("if (unlikely(%s < 0)) %s" % (value, self.error_goto(pos))) # TODO this path is almost _never_ taken, yet this macro makes it slower!
return self.putln("if (%s < 0) %s" % (value, self.error_goto(pos)))
def put_h_guard(self, guard):
self.putln("#ifndef %s" % guard)
self.putln("#define %s" % guard)
def error_goto(self, pos):
lbl = self.error_label
self.use_label(lbl)
if Options.c_line_in_traceback:
cinfo = " %s = %s;" % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
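# With the default naming this expands to something like
#   {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; goto __pyx_L1;}
# (the exact identifiers come from Naming; cinfo adds the C line number
# when c_line_in_traceback is enabled).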
return "{%s = %s[%s]; %s = %s;%s goto %s;}" % (
Naming.filename_cname,
Naming.filetable_cname,
self.lookup_filename(pos[0]),
Naming.lineno_cname,
pos[1],
cinfo,
lbl)
def error_goto_if(self, cond, pos):
if Options.gcc_branch_hints:
return "if (unlikely(%s)) %s" % (cond, self.error_goto(pos))
else:
return "if (%s) %s" % (cond, self.error_goto(pos))
def error_goto_if_null(self, cname, pos):
return self.error_goto_if("!%s" % cname, pos)
def error_goto_if_neg(self, cname, pos):
return self.error_goto_if("%s < 0" % cname, pos)
def error_goto_if_PyErr(self, pos):
return self.error_goto_if("PyErr_Occurred()", pos)
def lookup_filename(self, filename):
try:
index = self.filename_table[filename]
except KeyError:
index = len(self.filename_list)
self.filename_list.append(filename)
self.filename_table[filename] = index
return index
class PyrexCodeWriter:
# f file output file
# level int indentation level
def __init__(self, outfile_name):
self.f = open_new_file(outfile_name)
self.level = 0
def putln(self, code):
self.f.write("%s%s\n" % (" " * self.level, code))
def indent(self):
self.level += 1
def dedent(self):
self.level -= 1
import bisect
# This module keeps track of arbitrary "states" at any point of the code.
# A state is considered known if every path to the given point agrees on
# its state, otherwise it is None (i.e. unknown).
# It might be useful to be able to "freeze" the set of states by pushing
# all state changes to the tips of the trees for fast reading. Perhaps this
# could be done on get_state, clearing the cache on set_state (assuming
# incoming is immutable).
# This module still needs a lot of work, and probably should totally be
# redesigned. It doesn't take return, raise, continue, or break into
# account.
class ControlFlow:
def __init__(self, start_pos, incoming, parent):
self.start_pos = start_pos
self.incoming = incoming
if parent is None and incoming is not None:
parent = incoming.parent
self.parent = parent
self.tip = {}
self.end_pos = ((),)
def start_branch(self, pos):
self.end_pos = pos
branch_point = BranchingControlFlow(pos, self)
if self.parent is not None:
self.parent.branches[-1] = branch_point
return branch_point.branches[0]
def next_branch(self, pos):
self.end_pos = pos
return self.parent.new_branch(pos)
def finish_branch(self, pos):
self.end_pos = pos
self.parent.end_pos = pos
return LinearControlFlow(pos, self.parent)
def get_state(self, item, pos=((),())):
return self.get_pos_state(item, pos)[1]
def get_pos_state(self, item, pos=((),())):
# do some caching
if pos > self.end_pos:
try:
return self.tip[item]
except KeyError:
self.tip[item] = pos_state = self._get_pos_state(item, pos)
return pos_state
else:
return self._get_pos_state(item, pos)
class LinearControlFlow(ControlFlow):
def __init__(self, start_pos=(), incoming=None, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.events = {}
def set_state(self, pos, item, state):
if self.tip.has_key(item):
del self.tip[item]
if pos < self.start_pos:
if self.incoming is not None:
self.incoming.set_state(pos, item, state)
else:
if self.events.has_key(item):
event_list = self.events[item]
else:
event_list = []
self.events[item] = event_list
bisect.insort(event_list, (pos, state))
def _get_pos_state(self, item, pos):
if pos > self.start_pos:
if self.events.has_key(item):
event_list = self.events[item]
for event in event_list[::-1]:
if event[0] < pos:
return event
if self.incoming is not None:
return self.incoming.get_pos_state(item, pos)
else:
return None, None
def to_string(self, indent='', limit=None):
if len(self.events) == 0:
s = indent + "[no state changes]"
else:
all = []
for item, event_list in self.events.items():
for pos, state in event_list:
all.append((indent, pos, item, state))
all.sort()
all = ["%s%s: %s <- %s" % data for data in all]
s = "\n".join(all)
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
class BranchingControlFlow(ControlFlow):
def __init__(self, start_pos, incoming, parent=None):
ControlFlow.__init__(self, start_pos, incoming, parent)
self.branches = [LinearControlFlow(start_pos, incoming, parent=self)]
self.branch_starts = [start_pos]
def set_state(self, pos, item, state):
if self.tip.has_key(item):
del self.tip[item]
if pos < self.start_pos:
self.incoming.set_state(pos, item, state)
else:
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
branch.set_state(pos, item, state)
return
def _get_pos_state(self, item, pos):
if pos <= self.start_pos:
return self.incoming.get_pos_state(item, pos)
elif pos < self.end_pos:
for branch_pos, branch in zip(self.branch_starts[::-1], self.branches[::-1]):
if pos >= branch_pos:
return branch.get_pos_state(item, pos)
else:
last_pos, last_state = self.branches[0].get_pos_state(item, pos)
if last_state is None:
return None, None
for branch in self.branches[1:]:
other_pos, other_state = branch.get_pos_state(item, pos)
if other_state is None or other_state != last_state:
return None, None
elif last_pos is not other_pos:
last_pos = max(last_pos, other_pos)
return last_pos, last_state
def new_branch(self, pos):
self.branches.append(LinearControlFlow(pos, self.incoming, parent=self))
self.branch_starts.append(pos)
return self.branches[-1]
def to_string(self, indent='', limit=None):
join = "\n%sor\n" % indent
s = join.join([branch.to_string(indent+" ", limit=self.incoming) for branch in self.branches])
if self.incoming is not limit and self.incoming is not None:
s = "%s\n%s" % (self.incoming.to_string(indent, limit=limit), s)
return s
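# A minimal sketch of the intended use of the classes above (positions are
# simplified to bare integers; real callers pass source positions):
#
#   flow = LinearControlFlow(0)
#   flow.set_state(1, 'x', 'int')      # x known to be an int at position 1
#   branch = flow.start_branch(2)      # if ...:
#   branch.set_state(3, 'x', 'str')
#   branch = branch.next_branch(4)     # else:
#   branch.set_state(5, 'x', 'str')
#   flow = branch.finish_branch(6)
#   flow.get_state('x', 7)             # both paths agree -> 'str'
#
# Had the two branches disagreed, get_state would have returned None (unknown).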
debug_disposal_code = 0
debug_temp_alloc = 0
debug_coercion = 0
#
# Pyrex - Errors
#
import sys
from Cython.Utils import open_new_file
class PyrexError(Exception):
pass
class PyrexWarning(Exception):
pass
def context(position):
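# Return the few source lines leading up to 'position' with a caret marking
# the offending column, framed by dashes, for use in CompileError messages.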
F = open(position[0]).readlines()
s = ''.join(F[position[1]-6:position[1]])
s += ' '*(position[2]-1) + '^'
s = '-'*60 + '\n...\n' + s + '\n' + '-'*60 + '\n'
return s
class CompileError(PyrexError):
def __init__(self, position = None, message = ""):
self.position = position
self.message = message
if position:
pos_str = "%s:%d:%d: " % position
cont = context(position)
else:
pos_str = ""
cont = ''
Exception.__init__(self, '\nError converting Pyrex file to C:\n' + cont + '\n' + pos_str + message )
class CompileWarning(PyrexWarning):
def __init__(self, position = None, message = ""):
self.position = position
self.message = message
if position:
pos_str = "%s:%d:%d: " % position
else:
pos_str = ""
Exception.__init__(self, pos_str + message)
class InternalError(Exception):
# If this is ever raised, there is a bug in the compiler.
def __init__(self, message):
Exception.__init__(self, "Internal compiler error: %s"
% message)
listing_file = None
num_errors = 0
echo_file = None
def open_listing_file(path, echo_to_stderr = 1):
# Begin a new error listing. If path is None, no file
# is opened, the error counter is just reset.
global listing_file, num_errors, echo_file
if path is not None:
listing_file = open_new_file(path)
else:
listing_file = None
if echo_to_stderr:
echo_file = sys.stderr
else:
echo_file = None
num_errors = 0
def close_listing_file():
global listing_file
if listing_file:
listing_file.close()
listing_file = None
def error(position, message):
#print "Errors.error:", repr(position), repr(message) ###
global num_errors
err = CompileError(position, message)
line = "%s\n" % err
if listing_file:
listing_file.write(line)
if echo_file:
echo_file.write(line)
num_errors = num_errors + 1
return err
LEVEL = 1 # report all warnings of level 1 or higher
def warning(position, message, level=0):
if level < LEVEL:
return
warn = CompileWarning(position, message)
line = "warning: %s\n" % warn
if listing_file:
listing_file.write(line)
if echo_file:
echo_file.write(line)
return warn
#
# Pyrex Scanner - Lexical Definitions
#
# Changing anything in this file will cause Lexicon.pickle
# to be rebuilt next time pyrexc is run.
#
string_prefixes = "cCrRuU"
def make_lexicon():
from Cython.Plex import \
Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
TEXT, IGNORE, State, Lexicon
from Scanning import Method
letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
digit = Any("0123456789")
octdigit = Any("01234567")
hexdigit = Any("0123456789ABCDEFabcdef")
indentation = Bol + Rep(Any(" \t"))
decimal = Rep1(digit)
dot = Str(".")
exponent = Any("Ee") + Opt(Any("+-")) + decimal
decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
name = letter + Rep(letter | digit)
intconst = decimal | (Str("0x") + Rep1(hexdigit))
longconst = intconst + Str("L")
fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
imagconst = (intconst | fltconst) + Any("jJ")
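# e.g. "42" and "0x1F" match intconst, "42L" longconst,
# "1.5e3" and ".5" fltconst, "2j" imagconst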
sq_string = (
Str("'") +
Rep(AnyBut("\\\n'") | (Str("\\") + AnyChar)) +
Str("'")
)
dq_string = (
Str('"') +
Rep(AnyBut('\\\n"') | (Str("\\") + AnyChar)) +
Str('"')
)
non_sq = AnyBut("'") | (Str('\\') + AnyChar)
tsq_string = (
Str("'''")
+ Rep(non_sq | (Str("'") + non_sq) | (Str("''") + non_sq))
+ Str("'''")
)
non_dq = AnyBut('"') | (Str('\\') + AnyChar)
tdq_string = (
Str('"""')
+ Rep(non_dq | (Str('"') + non_dq) | (Str('""') + non_dq))
+ Str('"""')
)
stringlit = Opt(Any(string_prefixes)) + (sq_string | dq_string | tsq_string| tdq_string)
beginstring = Opt(Any(string_prefixes)) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
two_oct = octdigit + octdigit
three_oct = octdigit + octdigit + octdigit
two_hex = hexdigit + hexdigit
four_hex = two_hex + two_hex
escapeseq = Str("\\") + (two_oct | three_oct | two_hex |
Str('u') + four_hex | Str('x') + two_hex | AnyChar)
bra = Any("([{")
ket = Any(")]}")
punct = Any(":,;+-*/|&<>=.%`~^?")
diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=", "//")
spaces = Rep1(Any(" \t\f"))
comment = Str("#") + Rep(AnyBut("\n"))
escaped_newline = Str("\\\n")
lineterm = Eol + Opt(Str("\n"))
return Lexicon([
(name, 'IDENT'),
(intconst, 'INT'),
(longconst, 'LONG'),
(fltconst, 'FLOAT'),
(imagconst, 'IMAG'),
(punct | diphthong, TEXT),
(bra, Method('open_bracket_action')),
(ket, Method('close_bracket_action')),
(lineterm, Method('newline_action')),
#(stringlit, 'STRING'),
(beginstring, Method('begin_string_action')),
(comment, IGNORE),
(spaces, IGNORE),
(escaped_newline, IGNORE),
State('INDENT', [
(Opt(spaces) + Opt(comment) + lineterm, IGNORE),
(indentation, Method('indentation_action')),
(Eof, Method('eof_action'))
]),
State('SQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
(Str('"'), 'CHARS'),
(Str("\n"), Method('unclosed_string_action')),
(Str("'"), Method('end_string_action')),
(Eof, 'EOF')
]),
State('DQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut('"\n\\')), 'CHARS'),
(Str("'"), 'CHARS'),
(Str("\n"), Method('unclosed_string_action')),
(Str('"'), Method('end_string_action')),
(Eof, 'EOF')
]),
State('TSQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut("'\"\n\\")), 'CHARS'),
(Any("'\""), 'CHARS'),
(Str("\n"), 'NEWLINE'),
(Str("'''"), Method('end_string_action')),
(Eof, 'EOF')
]),
State('TDQ_STRING', [
(escapeseq, 'ESCAPE'),
(Rep1(AnyBut('"\'\n\\')), 'CHARS'),
(Any("'\""), 'CHARS'),
(Str("\n"), 'NEWLINE'),
(Str('"""'), Method('end_string_action')),
(Eof, 'EOF')
]),
(Eof, Method('eof_action'))
],
# FIXME: Plex 1.9 needs different args here from Plex 1.1.4
#debug_flags = scanner_debug_flags,
#debug_file = scanner_dump_file
)
#
# Cython Top Level
#
import os, sys, re
if sys.version_info[:2] < (2, 2):
print >>sys.stderr, "Sorry, Cython requires Python 2.2 or later"
sys.exit(1)
import os
from time import time
import Version
from Scanning import PyrexScanner
import Errors
from Errors import PyrexError, CompileError, error
import Parsing
from Symtab import BuiltinScope, ModuleScope
import Code
from Cython.Utils import replace_suffix
from Cython import Utils
import Transform
verbose = 0
class Context:
# This class encapsulates the context needed for compiling
# one or more Cython implementation files along with their
# associated and imported declaration files. It includes
# the root of the module import namespace and the list
# of directories to search for include files.
#
# modules {string : ModuleScope}
# include_directories [string]
def __init__(self, include_directories):
#self.modules = {"__builtin__" : BuiltinScope()}
import Builtin
self.modules = {"__builtin__" : Builtin.builtin_scope}
self.include_directories = include_directories
def find_module(self, module_name,
relative_to = None, pos = None, need_pxd = 1):
# Finds and returns the module scope corresponding to
# the given relative or absolute module name. If this
# is the first time the module has been requested, finds
# the corresponding .pxd file and processes it.
# If relative_to is not None, it must be a module scope,
# and the module will first be searched for relative to
# that module, provided its name is not a dotted name.
debug_find_module = 0
if debug_find_module:
print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
module_name, relative_to, pos, need_pxd))
scope = None
pxd_pathname = None
if "." not in module_name and relative_to:
if debug_find_module:
print("...trying relative import")
scope = relative_to.lookup_submodule(module_name)
if not scope:
qualified_name = relative_to.qualify_name(module_name)
pxd_pathname = self.find_pxd_file(qualified_name, pos)
if pxd_pathname:
scope = relative_to.find_submodule(module_name)
if not scope:
if debug_find_module:
print("...trying absolute import")
scope = self
for name in module_name.split("."):
scope = scope.find_submodule(name)
if debug_find_module:
print("...scope =", scope)
if not scope.pxd_file_loaded:
if debug_find_module:
print("...pxd not loaded")
scope.pxd_file_loaded = 1
if not pxd_pathname:
if debug_find_module:
print("...looking for pxd file")
pxd_pathname = self.find_pxd_file(module_name, pos)
if debug_find_module:
print("......found ", pxd_pathname)
if not pxd_pathname and need_pxd:
error(pos, "'%s.pxd' not found" % module_name)
if pxd_pathname:
try:
if debug_find_module:
print("Context.find_module: Parsing %s" % pxd_pathname)
pxd_tree = self.parse(pxd_pathname, scope.type_names, pxd = 1,
full_module_name = module_name)
pxd_tree.analyse_declarations(scope)
except CompileError:
pass
return scope
def find_pxd_file(self, module_name, pos):
# Search include directories for the .pxd file
# corresponding to the given (full) module name.
if "." in module_name:
pxd_filename = "%s.pxd" % os.path.join(*module_name.split('.'))
else:
pxd_filename = "%s.pxd" % module_name
return self.search_include_directories(pxd_filename, pos)
def find_include_file(self, filename, pos):
# Search list of include directories for filename.
# Reports an error and returns None if not found.
path = self.search_include_directories(filename, pos)
if not path:
error(pos, "'%s' not found" % filename)
return path
def search_include_directories(self, filename, pos):
# Search the list of include directories for the given
# file name. If a source file position is given, first
# searches the directory containing that file. Returns
# None if not found, but does not report an error.
dirs = self.include_directories
if pos:
here_dir = os.path.dirname(pos[0])
dirs = [here_dir] + dirs
for dir in dirs:
path = os.path.join(dir, filename)
if os.path.exists(path):
return path
return None
def lookup_submodule(self, name):
# Look up a top-level module. Returns None if not found.
return self.modules.get(name, None)
def find_submodule(self, name):
# Find a top-level module, creating a new one if needed.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
parent_module = None, context = self)
self.modules[name] = scope
return scope
def parse(self, source_filename, type_names, pxd, full_module_name):
# Parse the given source file and return a parse tree.
f = open(source_filename, "rU")
s = PyrexScanner(f, source_filename,
type_names = type_names, context = self)
try:
tree = Parsing.p_module(s, pxd, full_module_name)
finally:
f.close()
if Errors.num_errors > 0:
raise CompileError
return tree
def extract_module_name(self, path, options):
# Get the module name out of a source file pathname.
_, tail = os.path.split(path)
name, _ = os.path.splitext(tail)
return name
def compile(self, source, options = None, full_module_name = None):
# Compile a Pyrex implementation file in this context
# and return a CompilationResult.
if not options:
options = default_options
result = CompilationResult()
cwd = os.getcwd()
if full_module_name is None:
full_module_name, _ = os.path.splitext(source)
full_module_name = re.sub(r'[\\/]', '.', full_module_name)
full_module_name = re.sub(r'[^\w.]', '_', full_module_name)
source = os.path.join(cwd, source)
if options.use_listing_file:
result.listing_file = replace_suffix(source, ".lis")
Errors.open_listing_file(result.listing_file,
echo_to_stderr = options.errors_to_stderr)
else:
Errors.open_listing_file(None)
if options.output_file:
result.c_file = os.path.join(cwd, options.output_file)
else:
if options.cplus:
c_suffix = ".cpp"
else:
c_suffix = ".c"
result.c_file = replace_suffix(source, c_suffix)
c_stat = None
if result.c_file:
try:
c_stat = os.stat(result.c_file)
except EnvironmentError:
pass
module_name = full_module_name # self.extract_module_name(source, options)
initial_pos = (source, 1, 0)
scope = self.find_module(module_name, pos = initial_pos, need_pxd = 0)
errors_occurred = False
try:
tree = self.parse(source, scope.type_names, pxd = 0, full_module_name = full_module_name)
tree.process_implementation(scope, options, result)
except CompileError:
errors_occurred = True
Errors.close_listing_file()
result.num_errors = Errors.num_errors
if result.num_errors > 0:
errors_occurred = True
if errors_occurred and result.c_file:
try:
#os.unlink(result.c_file)
Utils.castrate_file(result.c_file, c_stat)
except EnvironmentError:
pass
result.c_file = None
if result.c_file and not options.c_only and c_compile:
result.object_file = c_compile(result.c_file,
verbose_flag = options.show_version,
cplus = options.cplus)
if not options.obj_only and c_link:
result.extension_file = c_link(result.object_file,
extra_objects = options.objects,
verbose_flag = options.show_version,
cplus = options.cplus)
return result
#------------------------------------------------------------------------
#
# Main Python entry point
#
#------------------------------------------------------------------------
class CompilationOptions:
"""
Options to the Cython compiler:
show_version boolean Display version number
use_listing_file boolean Generate a .lis file
errors_to_stderr boolean Echo errors to stderr when using .lis
include_path [string] Directories to search for include files
output_file string Name of generated .c file
generate_pxi boolean Generate .pxi file for public declarations
transforms Transform.TransformSet Transforms to use on the parse tree
The following options are experimental and only used on MacOSX:
c_only boolean Stop after generating C file (default)
obj_only boolean Stop after compiling to .o file
objects [string] Extra .o files to link with
cplus boolean Compile as c++ code
"""
def __init__(self, defaults = None, **kw):
self.include_path = []
self.objects = []
if defaults:
if isinstance(defaults, CompilationOptions):
defaults = defaults.__dict__
else:
defaults = default_options
self.__dict__.update(defaults)
self.__dict__.update(kw)
class CompilationResult:
"""
Results from the Cython compiler:
c_file string or None The generated C source file
h_file string or None The generated C header file
i_file string or None The generated .pxi file
api_file string or None The generated C API .h file
listing_file string or None File of error messages
object_file string or None Result of compiling the C file
extension_file string or None Result of linking the object file
num_errors integer Number of compilation errors
"""
def __init__(self):
self.c_file = None
self.h_file = None
self.i_file = None
self.api_file = None
self.listing_file = None
self.object_file = None
self.extension_file = None
def compile(source, options = None, c_compile = 0, c_link = 0,
full_module_name = None):
"""
compile(source, options = default_options)
Compile the given Cython implementation file and return
a CompilationResult object describing what was produced.
"""
if not options:
options = default_options
options = CompilationOptions(defaults = options)
if c_compile:
options.c_only = 0
if c_link:
options.obj_only = 0
context = Context(options.include_path)
return context.compile(source, options, full_module_name)
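# A minimal usage sketch (the file name is illustrative):
#
#   from Cython.Compiler.Main import compile
#   result = compile("example.pyx")
#   if result.num_errors == 0:
#       print result.c_file          # path of the generated C file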
#------------------------------------------------------------------------
#
# Main command-line entry point
#
#------------------------------------------------------------------------
def main(command_line = 0):
args = sys.argv[1:]
any_failures = 0
if command_line:
from CmdLine import parse_command_line
options, sources = parse_command_line(args)
else:
options = default_options
sources = args
if options.show_version:
print >>sys.stderr, "Cython version %s" % Version.version
context = Context(options.include_path)
for source in sources:
try:
result = context.compile(source, options)
if result.num_errors > 0:
any_failures = 1
except PyrexError, e:
print >>sys.stderr, e
any_failures = 1
if any_failures:
sys.exit(1)
#------------------------------------------------------------------------
#
# Set the default options depending on the platform
#
#------------------------------------------------------------------------
default_options = dict(
show_version = 0,
use_listing_file = 0,
errors_to_stderr = 1,
c_only = 1,
obj_only = 1,
cplus = 0,
output_file = None,
generate_pxi = 0,
transforms = Transform.TransformSet())
if sys.platform == "mac":
from Cython.Mac.MacSystem import c_compile, c_link, CCompilerError
default_options['use_listing_file'] = 1
elif sys.platform == "darwin":
from Cython.Mac.DarwinSystem import c_compile, c_link, CCompilerError
else:
c_compile = None
c_link = None
#
# Pyrex - Module parse tree node
#
import os, time
from cStringIO import StringIO
from PyrexTypes import CPtrType
import Annotate
import Code
import Naming
import Nodes
import Options
import PyrexTypes
import TypeSlots
import Version
from Errors import error
from PyrexTypes import py_object_type
from Cython.Utils import open_new_file, replace_suffix
def recurse_vtab_check_inheritance(entry, b, dict):
base = entry
while base is not None:
if base.type.base_type is None or base.type.base_type.vtabstruct_cname is None:
return False
if base.type.base_type.vtabstruct_cname == b.type.vtabstruct_cname:
return True
if base.type.base_type.typedef_flag:
return True
base = dict[base.type.base_type.vtabstruct_cname]
return False
def recurse_vtabslot_check_inheritance(entry, b, dict):
base = entry
while base is not None:
if base.type.base_type is None:
return False
if base.type.base_type.objstruct_cname == b.type.objstruct_cname:
return True
if base.type.base_type.typedef_flag:
return True
base = dict[base.type.base_type.objstruct_cname]
return False
class ModuleNode(Nodes.Node, Nodes.BlockNode):
# doc string or None
# body StatListNode
#
# referenced_modules [ModuleScope]
# module_temp_cname string
# full_module_name string
children_attrs = ["body"]
def analyse_declarations(self, env):
if Options.embed_pos_in_docstring:
env.doc = 'File: %s (starting at line %s)'%Nodes.relative_position(self.pos)
if not self.doc is None:
env.doc = env.doc + '\\n' + self.doc
else:
env.doc = self.doc
self.body.analyse_declarations(env)
def process_implementation(self, env, options, result):
self.analyse_declarations(env)
env.check_c_classes()
self.body.analyse_expressions(env)
env.return_type = PyrexTypes.c_void_type
self.referenced_modules = []
self.find_referenced_modules(env, self.referenced_modules, {})
if self.has_imported_c_functions():
self.module_temp_cname = env.allocate_temp_pyobject()
env.release_temp(self.module_temp_cname)
self.generate_c_code(env, options, result)
self.generate_h_code(env, options, result)
self.generate_api_code(env, result)
def has_imported_c_functions(self):
for module in self.referenced_modules:
for entry in module.cfunc_entries:
if entry.defined_in_pxd:
return 1
return 0
def generate_h_code(self, env, options, result):
def h_entries(entries, pxd = 0):
return [entry for entry in entries
if entry.visibility == 'public' or pxd and entry.defined_in_pxd]
h_types = h_entries(env.type_entries)
h_vars = h_entries(env.var_entries)
h_funcs = h_entries(env.cfunc_entries)
h_extension_types = h_entries(env.c_class_entries)
if h_types or h_vars or h_funcs or h_extension_types:
result.h_file = replace_suffix(result.c_file, ".h")
h_code = Code.CCodeWriter(open_new_file(result.h_file))
if options.generate_pxi:
result.i_file = replace_suffix(result.c_file, ".pxi")
i_code = Code.PyrexCodeWriter(result.i_file)
else:
i_code = None
guard = Naming.h_guard_prefix + env.qualified_name.replace(".", "__")
h_code.put_h_guard(guard)
self.generate_extern_c_macro_definition(h_code)
self.generate_type_header_code(h_types, h_code)
h_code.putln("")
h_code.putln("#ifndef %s" % Naming.api_guard_prefix + self.api_name(env))
if h_vars:
h_code.putln("")
for entry in h_vars:
self.generate_public_declaration(entry, h_code, i_code)
if h_funcs:
h_code.putln("")
for entry in h_funcs:
self.generate_public_declaration(entry, h_code, i_code)
if h_extension_types:
h_code.putln("")
for entry in h_extension_types:
self.generate_cclass_header_code(entry.type, h_code)
if i_code:
self.generate_cclass_include_code(entry.type, i_code)
h_code.putln("")
h_code.putln("#endif")
h_code.putln("")
h_code.putln("PyMODINIT_FUNC init%s(void);" % env.module_name)
h_code.putln("")
h_code.putln("#endif")
def generate_public_declaration(self, entry, h_code, i_code):
h_code.putln("%s %s;" % (
Naming.extern_c_macro,
entry.type.declaration_code(
entry.cname, dll_linkage = "DL_IMPORT")))
if i_code:
i_code.putln("cdef extern %s" %
entry.type.declaration_code(entry.cname, pyrex = 1))
def api_name(self, env):
return env.qualified_name.replace(".", "__")
def generate_api_code(self, env, result):
api_funcs = []
public_extension_types = []
has_api_extension_types = 0
for entry in env.cfunc_entries:
if entry.api:
api_funcs.append(entry)
for entry in env.c_class_entries:
if entry.visibility == 'public':
public_extension_types.append(entry)
if entry.api:
has_api_extension_types = 1
if api_funcs or has_api_extension_types:
result.api_file = replace_suffix(result.c_file, "_api.h")
h_code = Code.CCodeWriter(open_new_file(result.api_file))
name = self.api_name(env)
guard = Naming.api_guard_prefix + name
h_code.put_h_guard(guard)
h_code.putln('#include "Python.h"')
if result.h_file:
h_code.putln('#include "%s"' % os.path.basename(result.h_file))
for entry in public_extension_types:
type = entry.type
h_code.putln("")
h_code.putln("static PyTypeObject *%s;" % type.typeptr_cname)
h_code.putln("#define %s (*%s)" % (
type.typeobj_cname, type.typeptr_cname))
if api_funcs:
h_code.putln("")
for entry in api_funcs:
type = CPtrType(entry.type)
h_code.putln("static %s;" % type.declaration_code(entry.cname))
h_code.putln("")
h_code.put_h_guard(Naming.api_func_guard + "import_module")
h_code.put(import_module_utility_code[1])
h_code.putln("")
h_code.putln("#endif")
if api_funcs:
h_code.putln("")
h_code.put(function_import_utility_code[1])
if public_extension_types:
h_code.putln("")
h_code.put(type_import_utility_code[1])
h_code.putln("")
h_code.putln("static int import_%s(void) {" % name)
h_code.putln("PyObject *module = 0;")
h_code.putln('module = __Pyx_ImportModule("%s");' % env.qualified_name)
h_code.putln("if (!module) goto bad;")
for entry in api_funcs:
sig = entry.type.signature_string()
h_code.putln(
'if (__Pyx_ImportFunction(module, "%s", (void**)&%s, "%s") < 0) goto bad;' % (
entry.name,
entry.cname,
sig))
h_code.putln("Py_DECREF(module); module = 0;")
for entry in public_extension_types:
self.generate_type_import_call(
entry.type, h_code,
"if (!%s) goto bad;" % entry.type.typeptr_cname)
h_code.putln("return 0;")
h_code.putln("bad:")
h_code.putln("Py_XDECREF(module);")
h_code.putln("return -1;")
h_code.putln("}")
h_code.putln("")
h_code.putln("#endif")
def generate_cclass_header_code(self, type, h_code):
h_code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
Naming.extern_c_macro,
type.typeobj_cname))
#self.generate_obj_struct_definition(type, h_code)
def generate_cclass_include_code(self, type, i_code):
i_code.putln("cdef extern class %s.%s:" % (
type.module_name, type.name))
i_code.indent()
var_entries = type.scope.var_entries
if var_entries:
for entry in var_entries:
i_code.putln("cdef %s" %
entry.type.declaration_code(entry.cname, pyrex = 1))
else:
i_code.putln("pass")
i_code.dedent()
def generate_c_code(self, env, options, result):
modules = self.referenced_modules
if Options.annotate:
code = Annotate.AnnotationCCodeWriter(StringIO())
else:
code = Code.CCodeWriter(StringIO())
code.h = Code.CCodeWriter(StringIO())
code.init_labels()
self.generate_module_preamble(env, modules, code.h)
code.putln("")
code.putln("/* Implementation of %s */" % env.qualified_name)
self.generate_const_definitions(env, code)
self.generate_interned_num_decls(env, code)
self.generate_interned_name_decls(env, code)
self.generate_py_string_decls(env, code)
self.generate_cached_builtins_decls(env, code)
self.body.generate_function_definitions(env, code, options.transforms)
code.mark_pos(None)
self.generate_interned_name_table(env, code)
self.generate_py_string_table(env, code)
self.generate_typeobj_definitions(env, code)
self.generate_method_table(env, code)
self.generate_filename_init_prototype(code)
self.generate_module_init_func(modules[:-1], env, code)
self.generate_module_init2_func(modules[:-1], env, code)
code.mark_pos(None)
self.generate_module_cleanup_func(env, code)
self.generate_filename_table(code)
self.generate_utility_functions(env, code)
self.generate_declarations_for_modules(env, modules, code.h)
f = open_new_file(result.c_file)
f.write(code.h.f.getvalue())
f.write("\n")
f.write(code.f.getvalue())
f.close()
result.c_file_generated = 1
if Options.annotate:
self.annotate(code)
code.save_annotation(result.c_file[:-1] + "pyx") # change?
def find_referenced_modules(self, env, module_list, modules_seen):
if env not in modules_seen:
modules_seen[env] = 1
for imported_module in env.cimported_modules:
self.find_referenced_modules(imported_module, module_list, modules_seen)
module_list.append(env)
def generate_module_preamble(self, env, cimported_modules, code):
code.putln('/* Generated by Cython %s on %s */' % (
Version.version, time.asctime()))
code.putln('')
code.putln('#define PY_SSIZE_T_CLEAN')
for filename in env.python_include_files:
code.putln('#include "%s"' % filename)
code.putln("#ifndef PY_LONG_LONG")
code.putln(" #define PY_LONG_LONG LONG_LONG")
code.putln("#endif")
code.putln("#if PY_VERSION_HEX < 0x02050000")
code.putln(" typedef int Py_ssize_t;")
code.putln(" #define PY_SSIZE_T_MAX INT_MAX")
code.putln(" #define PY_SSIZE_T_MIN INT_MIN")
code.putln(" #define PyInt_FromSsize_t(z) PyInt_FromLong(z)")
code.putln(" #define PyInt_AsSsize_t(o) PyInt_AsLong(o)")
code.putln(" #define PyNumber_Index(o) PyNumber_Int(o)")
code.putln(" #define PyIndex_Check(o) PyNumber_Check(o)")
code.putln("#endif")
code.putln("#if PY_VERSION_HEX < 0x02040000")
code.putln(" #define METH_COEXIST 0")
code.putln("#endif")
code.putln("#ifndef __stdcall")
code.putln(" #define __stdcall")
code.putln("#endif")
code.putln("#ifndef __cdecl")
code.putln(" #define __cdecl")
code.putln("#endif")
self.generate_extern_c_macro_definition(code)
code.putln("#include <math.h>")
self.generate_includes(env, cimported_modules, code)
code.putln('')
code.put(Nodes.utility_function_predeclarations)
code.put(PyrexTypes.type_conversion_predeclarations)
code.put(Nodes.branch_prediction_macros)
code.putln('')
code.putln('static PyObject *%s;' % env.module_cname)
code.putln('static PyObject *%s;' % Naming.builtins_cname)
code.putln('static PyObject *%s;' % Naming.empty_tuple)
if Options.pre_import is not None:
code.putln('static PyObject *%s;' % Naming.preimport_cname)
code.putln('static int %s;' % Naming.lineno_cname)
code.putln('static int %s = 0;' % Naming.clineno_cname)
code.putln('static char * %s= %s;' % (Naming.cfilenm_cname, Naming.file_c_macro))
code.putln('static char *%s;' % Naming.filename_cname)
code.putln('static char **%s;' % Naming.filetable_cname)
if env.doc:
code.putln('')
code.putln('static char %s[] = "%s";' % (env.doc_cname, env.doc))
def generate_extern_c_macro_definition(self, code):
name = Naming.extern_c_macro
code.putln("#ifdef __cplusplus")
code.putln('#define %s extern "C"' % name)
code.putln("#else")
code.putln("#define %s extern" % name)
code.putln("#endif")
def generate_includes(self, env, cimported_modules, code):
includes = env.include_files[:]
for module in cimported_modules:
for filename in module.include_files:
if filename not in includes:
includes.append(filename)
for filename in includes:
code.putln('#include "%s"' % filename)
def generate_filename_table(self, code):
code.putln("")
code.putln("static char *%s[] = {" % Naming.filenames_cname)
if code.filename_list:
for filename in code.filename_list:
filename = os.path.basename(filename)
escaped_filename = filename.replace("\\", "\\\\").replace('"', r'\"')
code.putln('"%s",' %
escaped_filename)
else:
# Some C compilers don't like an empty array
code.putln("0")
code.putln("};")
def generate_vtab_dict(self, module_list):
vtab_dict = {}
for module in module_list:
for entry in module.c_class_entries:
if not entry.in_cinclude:
type = entry.type
scope = type.scope
if type.vtabstruct_cname:
vtab_dict[type.vtabstruct_cname]=entry
return vtab_dict
def generate_vtab_list(self, vtab_dict):
vtab_list = list()
for entry in vtab_dict.itervalues():
vtab_list.append(entry)
for i in range(0,len(vtab_list)):
for j in range(0,len(vtab_list)):
if(recurse_vtab_check_inheritance(vtab_list[j],vtab_list[i], vtab_dict)==1):
if i > j:
vtab_list.insert(j,vtab_list[i])
if i > j:
vtab_list.pop(i+1)
else:
vtab_list.pop(i)
#for entry in vtab_list:
#print entry.type.vtabstruct_cname
return vtab_list
def generate_vtabslot_dict(self, module_list, env):
vtab_dict={}
type_entries=[]
for module in module_list:
definition = module is env
if definition:
type_entries.extend( env.type_entries)
else:
for entry in module.type_entries:
if entry.defined_in_pxd:
type_entries.append(entry)
for entry in type_entries:
type = entry.type
if type.is_extension_type:
if not entry.in_cinclude:
type = entry.type
scope = type.scope
vtab_dict[type.objstruct_cname]=entry
return vtab_dict
def generate_vtabslot_list(self, vtab_dict):
vtab_list = list()
for entry in vtab_dict.itervalues():
vtab_list.append(entry)
for i in range(0,len(vtab_list)):
for j in range(0,len(vtab_list)):
if(recurse_vtabslot_check_inheritance(vtab_list[j],vtab_list[i], vtab_dict)==1):
if i > j:
vtab_list.insert(j,vtab_list[i])
if i > j:
vtab_list.pop(i+1)
else:
vtab_list.pop(i)
#for entry in vtab_list:
#print entry.type.vtabstruct_cname
return vtab_list
def generate_type_definitions(self, env, modules, vtab_list, vtabslot_list, code):
for module in modules:
definition = module is env
if definition:
type_entries = module.type_entries
else:
type_entries = []
for entry in module.type_entries:
if entry.defined_in_pxd:
type_entries.append(entry)
self.generate_type_header_code(type_entries, code)
for entry in vtabslot_list:
self.generate_obj_struct_definition(entry.type, code)
for entry in vtab_list:
self.generate_typeobject_predeclaration(entry, code)
self.generate_exttype_vtable_struct(entry, code)
self.generate_exttype_vtabptr_declaration(entry, code)
def generate_declarations_for_modules(self, env, modules, code):
code.putln("")
code.putln("/* Declarations */")
vtab_dict = self.generate_vtab_dict(modules)
vtab_list = self.generate_vtab_list(vtab_dict)
vtabslot_dict = self.generate_vtabslot_dict(modules,env)
vtabslot_list = self.generate_vtabslot_list(vtabslot_dict)
self.generate_type_definitions(env, modules, vtab_list, vtabslot_list, code)
for module in modules:
definition = module is env
self.generate_global_declarations(module, code, definition)
self.generate_cfunction_predeclarations(module, code, definition)
def generate_type_header_code(self, type_entries, code):
# Generate definitions of structs/unions/enums/typedefs/objstructs.
#self.generate_gcc33_hack(env, code) # Is this still needed?
#for entry in env.type_entries:
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
elif type.is_struct_or_union:
self.generate_struct_union_definition(entry, code)
elif type.is_enum:
self.generate_enum_definition(entry, code)
def generate_gcc33_hack(self, env, code):
# Workaround for spurious warning generation in gcc 3.3
code.putln("")
for entry in env.c_class_entries:
type = entry.type
if not type.typedef_flag:
name = type.objstruct_cname
if name.startswith("__pyx_"):
tail = name[6:]
else:
tail = name
code.putln("typedef struct %s __pyx_gcc33_%s;" % (
name, tail))
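# For a hypothetical object struct named "__pyx_obj_Spam", the line emitted
# above would be:
#   typedef struct __pyx_obj_Spam __pyx_gcc33_obj_Spam;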
def generate_typedef(self, entry, code):
base_type = entry.type.typedef_base_type
code.putln("")
code.putln("typedef %s;" % base_type.declaration_code(entry.cname))
def sue_header_footer(self, type, kind, name):
if type.typedef_flag:
header = "typedef %s {" % kind
footer = "} %s;" % name
else:
header = "%s %s {" % (kind, name)
footer = "};"
return header, footer
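# For example, sue_header_footer(type, "struct", "Spam") returns
#   ("typedef struct {", "} Spam;")  when type.typedef_flag is set, and
#   ("struct Spam {",    "};")       otherwise.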
def generate_struct_union_definition(self, entry, code):
code.mark_pos(entry.pos)
type = entry.type
scope = type.scope
if scope:
header, footer = \
self.sue_header_footer(type, type.kind, type.cname)
code.putln("")
code.putln(header)
var_entries = scope.var_entries
if not var_entries:
error(entry.pos,
"Empty struct or union definition not allowed outside a"
" 'cdef extern from' block")
for attr in var_entries:
code.putln(
"%s;" %
attr.type.declaration_code(attr.cname))
code.putln(footer)
def generate_enum_definition(self, entry, code):
code.mark_pos(entry.pos)
type = entry.type
name = entry.cname or entry.name or ""
header, footer = \
self.sue_header_footer(type, "enum", name)
code.putln("")
code.putln(header)
enum_values = entry.enum_values
if not enum_values:
error(entry.pos,
"Empty enum definition not allowed outside a"
" 'cdef extern from' block")
else:
last_entry = enum_values[-1]
for value_entry in enum_values:
if value_entry.value == value_entry.name:
value_code = value_entry.cname
else:
value_code = ("%s = %s" % (
value_entry.cname,
value_entry.value))
if value_entry is not last_entry:
value_code += ","
code.putln(value_code)
code.putln(footer)
def generate_typeobject_predeclaration(self, entry, code):
code.putln("")
name = entry.type.typeobj_cname
if name:
if entry.visibility == 'extern' and not entry.in_cinclude:
code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
Naming.extern_c_macro,
name))
elif entry.visibility == 'public':
#code.putln("DL_EXPORT(PyTypeObject) %s;" % name)
code.putln("%s DL_EXPORT(PyTypeObject) %s;" % (
Naming.extern_c_macro,
name))
# ??? Do we really need the rest of this? ???
#else:
# code.putln("staticforward PyTypeObject %s;" % name)
def generate_exttype_vtable_struct(self, entry, code):
code.mark_pos(entry.pos)
# Generate struct declaration for an extension type's vtable.
type = entry.type
scope = type.scope
if type.vtabstruct_cname:
code.putln("")
code.putln(
"struct %s {" %
type.vtabstruct_cname)
if type.base_type and type.base_type.vtabstruct_cname:
code.putln("struct %s %s;" % (
type.base_type.vtabstruct_cname,
Naming.obj_base_cname))
for method_entry in scope.cfunc_entries:
if not method_entry.is_inherited:
code.putln(
"%s;" % method_entry.type.declaration_code("(*%s)" % method_entry.name))
code.putln(
"};")
def generate_exttype_vtabptr_declaration(self, entry, code):
code.mark_pos(entry.pos)
# Generate declaration of pointer to an extension type's vtable.
type = entry.type
if type.vtabptr_cname:
code.putln("static struct %s *%s;" % (
type.vtabstruct_cname,
type.vtabptr_cname))
def generate_obj_struct_definition(self, type, code):
code.mark_pos(type.pos)
# Generate object struct definition for an
# extension type.
if not type.scope:
return # Forward declared but never defined
header, footer = \
self.sue_header_footer(type, "struct", type.objstruct_cname)
code.putln("")
code.putln(header)
base_type = type.base_type
if base_type:
code.putln(
"%s%s %s;" % (
("struct ", "")[base_type.typedef_flag],
base_type.objstruct_cname,
Naming.obj_base_cname))
else:
code.putln(
"PyObject_HEAD")
if type.vtabslot_cname and not (type.base_type and type.base_type.vtabslot_cname):
code.putln(
"struct %s *%s;" % (
type.vtabstruct_cname,
type.vtabslot_cname))
for attr in type.scope.var_entries:
code.putln(
"%s;" %
attr.type.declaration_code(attr.cname))
code.putln(footer)
def generate_global_declarations(self, env, code, definition):
code.putln("")
for entry in env.c_class_entries:
if definition or entry.defined_in_pxd:
code.putln("static PyTypeObject *%s = 0;" %
entry.type.typeptr_cname)
code.put_var_declarations(env.var_entries, static = 1,
dll_linkage = "DL_EXPORT", definition = definition)
code.put_var_declarations(env.default_entries, static = 1,
definition = definition)
def generate_cfunction_predeclarations(self, env, code, definition):
for entry in env.cfunc_entries:
if not entry.in_cinclude and (definition
or entry.defined_in_pxd or entry.visibility == 'extern'):
if entry.visibility in ('public', 'extern'):
dll_linkage = "DL_EXPORT"
else:
dll_linkage = None
type = entry.type
if not definition and entry.defined_in_pxd:
type = CPtrType(type)
header = type.declaration_code(entry.cname,
dll_linkage = dll_linkage)
if entry.visibility == 'private':
storage_class = "static "
elif entry.visibility == 'extern':
storage_class = "%s " % Naming.extern_c_macro
else:
storage_class = ""
code.putln("%s%s; /*proto*/" % (
storage_class,
header))
def generate_typeobj_definitions(self, env, code):
full_module_name = env.qualified_name
for entry in env.c_class_entries:
#print "generate_typeobj_definitions:", entry.name
#print "...visibility =", entry.visibility
if entry.visibility != 'extern':
type = entry.type
scope = type.scope
if scope: # could be None if there was an error
self.generate_exttype_vtable(scope, code)
self.generate_new_function(scope, code)
self.generate_dealloc_function(scope, code)
if scope.needs_gc():
self.generate_traverse_function(scope, code)
self.generate_clear_function(scope, code)
if scope.defines_any(["__getitem__"]):
self.generate_getitem_int_function(scope, code)
if scope.defines_any(["__setitem__", "__delitem__"]):
self.generate_ass_subscript_function(scope, code)
if scope.defines_any(["__setslice__", "__delslice__"]):
self.generate_ass_slice_function(scope, code)
if scope.defines_any(["__getattr__"]):
self.generate_getattro_function(scope, code)
if scope.defines_any(["__setattr__", "__delattr__"]):
self.generate_setattro_function(scope, code)
if scope.defines_any(["__get__"]):
self.generate_descr_get_function(scope, code)
if scope.defines_any(["__set__", "__delete__"]):
self.generate_descr_set_function(scope, code)
self.generate_property_accessors(scope, code)
self.generate_method_table(scope, code)
self.generate_member_table(scope, code)
self.generate_getset_table(scope, code)
self.generate_typeobj_definition(full_module_name, entry, code)
def generate_exttype_vtable(self, scope, code):
# Generate the definition of an extension type's vtable.
type = scope.parent_type
if type.vtable_cname:
code.putln("static struct %s %s;" % (
type.vtabstruct_cname,
type.vtable_cname))
def generate_self_cast(self, scope, code):
type = scope.parent_type
code.putln(
"%s = (%s)o;" % (
type.declaration_code("p"),
type.declaration_code("")))
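# For a hypothetical extension type whose object struct is "struct __pyx_obj_Spam",
# the emitted self-cast looks roughly like:
#   struct __pyx_obj_Spam *p = (struct __pyx_obj_Spam *)o;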
def generate_new_function(self, scope, code):
tp_slot = TypeSlots.ConstructorSlot("tp_new", '__new__')
slot_func = scope.mangle_internal("tp_new")
type = scope.parent_type
base_type = type.base_type
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject:
py_attrs.append(entry)
need_self_cast = type.vtabslot_cname or py_attrs
code.putln("")
code.putln(
"static PyObject *%s(PyTypeObject *t, PyObject *a, PyObject *k) {"
% scope.mangle_internal("tp_new"))
if need_self_cast:
code.putln(
"%s;"
% scope.parent_type.declaration_code("p"))
if base_type:
tp_new = TypeSlots.get_base_slot_function(scope, tp_slot)
if tp_new is None:
tp_new = "%s->tp_new" % base_type.typeptr_cname
code.putln(
"PyObject *o = %s(t, a, k);" % tp_new)
else:
code.putln(
"PyObject *o = (*t->tp_alloc)(t, 0);")
code.putln(
"if (!o) return 0;")
if need_self_cast:
code.putln(
"p = %s;"
% type.cast_code("o"))
#if need_self_cast:
# self.generate_self_cast(scope, code)
if type.vtabslot_cname:
code.putln("*(struct %s **)&p->%s = %s;" % (
type.vtabstruct_cname,
type.vtabslot_cname,
type.vtabptr_cname))
for entry in py_attrs:
if entry.name == "__weakref__":
code.putln("p->%s = 0;" % entry.cname)
else:
code.put_init_var_to_py_none(entry, "p->%s")
entry = scope.lookup_here("__new__")
if entry:
if entry.trivial_signature:
cinit_args = "o, %s, NULL" % Naming.empty_tuple
else:
cinit_args = "o, a, k"
code.putln(
"if (%s(%s) < 0) {" %
(entry.func_cname, cinit_args))
code.put_decref_clear("o", py_object_type);
code.putln(
"}")
code.putln(
"return o;")
code.putln(
"}")
def generate_dealloc_function(self, scope, code):
tp_slot = TypeSlots.ConstructorSlot("tp_dealloc", '__dealloc__')
slot_func = scope.mangle_internal("tp_dealloc")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
code.putln("")
code.putln(
"static void %s(PyObject *o) {"
% scope.mangle_internal("tp_dealloc"))
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject and entry.name != "__weakref__":
py_attrs.append(entry)
if py_attrs or scope.lookup_here("__weakref__"):
self.generate_self_cast(scope, code)
self.generate_usr_dealloc_call(scope, code)
if scope.lookup_here("__weakref__"):
code.putln("if (p->__weakref__) PyObject_ClearWeakRefs(o);")
for entry in py_attrs:
code.put_xdecref("p->%s" % entry.cname, entry.type)
if base_type:
tp_dealloc = TypeSlots.get_base_slot_function(scope, tp_slot)
if tp_dealloc is None:
tp_dealloc = "%s->tp_dealloc" % base_type.typeptr_cname
code.putln(
"%s(o);" % tp_dealloc)
else:
code.putln(
"(*o->ob_type->tp_free)(o);")
code.putln(
"}")
def generate_usr_dealloc_call(self, scope, code):
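# Call the user's __dealloc__ method, if any. The generated code saves and
# restores any exception already being propagated (PyErr_Fetch/PyErr_Restore)
# and temporarily bumps the refcount so that a transient incref/decref inside
# __dealloc__ cannot trigger a second deallocation of the object.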
entry = scope.lookup_here("__dealloc__")
if entry:
code.putln(
"{")
code.putln(
"PyObject *etype, *eval, *etb;")
code.putln(
"PyErr_Fetch(&etype, &eval, &etb);")
code.putln(
"++o->ob_refcnt;")
code.putln(
"%s(o);" %
entry.func_cname)
code.putln(
"if (PyErr_Occurred()) PyErr_WriteUnraisable(o);")
code.putln(
"--o->ob_refcnt;")
code.putln(
"PyErr_Restore(etype, eval, etb);")
code.putln(
"}")
def generate_traverse_function(self, scope, code):
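# Fill the tp_traverse slot: chain to the base type's traverse function if there
# is one, then visit each Python attribute so the cyclic garbage collector can
# discover reference cycles involving instances of this extension type.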
tp_slot = TypeSlots.GCDependentSlot("tp_traverse")
slot_func = scope.mangle_internal("tp_traverse")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
code.putln("")
code.putln(
"static int %s(PyObject *o, visitproc v, void *a) {"
% slot_func)
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject and entry.name != "__weakref__":
py_attrs.append(entry)
if base_type or py_attrs:
code.putln("int e;")
if py_attrs:
self.generate_self_cast(scope, code)
if base_type:
# want to call it explicitly if possible so inlining can be performed
static_call = TypeSlots.get_base_slot_function(scope, tp_slot)
if static_call:
code.putln("e = %s(o, v, a); if (e) return e;" % static_call)
else:
code.putln("if (%s->tp_traverse) {" % base_type.typeptr_cname)
code.putln(
"e = %s->tp_traverse(o, v, a); if (e) return e;" %
base_type.typeptr_cname)
code.putln("}")
for entry in py_attrs:
var_code = "p->%s" % entry.cname
code.putln(
"if (%s) {"
% var_code)
if entry.type.is_extension_type:
var_code = "((PyObject*)%s)" % var_code
code.putln(
"e = (*v)(%s, a); if (e) return e;"
% var_code)
code.putln(
"}")
code.putln(
"return 0;")
code.putln(
"}")
def generate_clear_function(self, scope, code):
tp_slot = TypeSlots.GCDependentSlot("tp_clear")
slot_func = scope.mangle_internal("tp_clear")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
code.putln("")
code.putln("static int %s(PyObject *o) {" % slot_func)
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject and entry.name != "__weakref__":
py_attrs.append(entry)
if py_attrs:
self.generate_self_cast(scope, code)
code.putln("PyObject* tmp;")
if base_type:
# want to call it explicitly if possible so inlining can be performed
static_call = TypeSlots.get_base_slot_function(scope, tp_slot)
if static_call:
code.putln("%s(o);" % static_call)
else:
code.putln("if (%s->tp_clear) {" % base_type.typeptr_cname)
code.putln("%s->tp_clear(o);" % base_type.typeptr_cname)
code.putln("}")
for entry in py_attrs:
name = "p->%s" % entry.cname
code.putln("tmp = ((PyObject*)%s);" % name)
code.put_init_to_py_none(name, entry.type)
code.putln("Py_XDECREF(tmp);")
code.putln(
"return 0;")
code.putln(
"}")
def generate_getitem_int_function(self, scope, code):
# This function is put into the sq_item slot when
# a __getitem__ method is present. It converts its
# argument to a Python integer and calls mp_subscript.
code.putln(
"static PyObject *%s(PyObject *o, Py_ssize_t i) {" %
scope.mangle_internal("sq_item"))
code.putln(
"PyObject *r;")
code.putln(
"PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0;")
code.putln(
"r = o->ob_type->tp_as_mapping->mp_subscript(o, x);")
code.putln(
"Py_DECREF(x);")
code.putln(
"return r;")
code.putln(
"}")
def generate_ass_subscript_function(self, scope, code):
# Setting and deleting an item are both done through
# the ass_subscript method, so we dispatch to user's __setitem__
# or __delitem__, or raise an exception.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setitem__")
del_entry = scope.lookup_here("__delitem__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
scope.mangle_internal("mp_ass_subscript"))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, i, v);" %
set_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "Subscript assignment not supported by %s", o->ob_type->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, i);" %
del_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "Subscript deletion not supported by %s", o->ob_type->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_guarded_basetype_call(
self, base_type, substructure, slot, args, code):
if base_type:
base_tpname = base_type.typeptr_cname
if substructure:
code.putln(
"if (%s->%s && %s->%s->%s)" % (
base_tpname, substructure, base_tpname, substructure, slot))
code.putln(
" return %s->%s->%s(%s);" % (
base_tpname, substructure, slot, args))
else:
code.putln(
"if (%s->%s)" % (
base_tpname, slot))
code.putln(
" return %s->%s(%s);" % (
base_tpname, slot, args))
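# For example, generate_guarded_basetype_call(base_type, "tp_as_mapping",
# "mp_ass_subscript", "o, i, v", code) would emit, for a hypothetical base type:
#   if (__pyx_ptype_Base->tp_as_mapping && __pyx_ptype_Base->tp_as_mapping->mp_ass_subscript)
#       return __pyx_ptype_Base->tp_as_mapping->mp_ass_subscript(o, i, v);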
def generate_ass_slice_function(self, scope, code):
# Setting and deleting a slice are both done through
# the ass_slice method, so we dispatch to user's __setslice__
# or __delslice__, or raise an exception.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setslice__")
del_entry = scope.lookup_here("__delslice__")
code.putln("")
code.putln(
"static int %s(PyObject *o, Py_ssize_t i, Py_ssize_t j, PyObject *v) {" %
scope.mangle_internal("sq_ass_slice"))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, i, j, v);" %
set_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "2-element slice assignment not supported by %s", o->ob_type->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, i, j);" %
del_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "2-element slice deletion not supported by %s", o->ob_type->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_getattro_function(self, scope, code):
# First try to get the attribute using PyObject_GenericGetAttr.
# If that raises an AttributeError, call the user's __getattr__
# method.
entry = scope.lookup_here("__getattr__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, PyObject *n) {"
% scope.mangle_internal("tp_getattro"))
code.putln(
"PyObject *v = PyObject_GenericGetAttr(o, n);")
code.putln(
"if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {")
code.putln(
"PyErr_Clear();")
code.putln(
"v = %s(o, n);" %
entry.func_cname)
code.putln(
"}")
code.putln(
"return v;")
code.putln(
"}")
def generate_setattro_function(self, scope, code):
# Setting and deleting an attribute are both done through
# the setattro method, so we dispatch to user's __setattr__
# or __delattr__ or fall back on PyObject_GenericSetAttr.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setattr__")
del_entry = scope.lookup_here("__delattr__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *n, PyObject *v) {" %
scope.mangle_internal("tp_setattro"))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, n, v);" %
set_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_setattro", "o, n, v", code)
code.putln(
"return PyObject_GenericSetAttr(o, n, v);")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, n);" %
del_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_setattro", "o, n, v", code)
code.putln(
"return PyObject_GenericSetAttr(o, n, 0);")
code.putln(
"}")
code.putln(
"}")
def generate_descr_get_function(self, scope, code):
# The __get__ function of a descriptor object can be
# called with NULL for the second or third arguments
# under some circumstances, so we replace them with
# None in that case.
user_get_entry = scope.lookup_here("__get__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, PyObject *i, PyObject *c) {" %
scope.mangle_internal("tp_descr_get"))
code.putln(
"PyObject *r = 0;")
code.putln(
"if (!i) i = Py_None;")
code.putln(
"if (!c) c = Py_None;")
#code.put_incref("i", py_object_type)
#code.put_incref("c", py_object_type)
code.putln(
"r = %s(o, i, c);" %
user_get_entry.func_cname)
#code.put_decref("i", py_object_type)
#code.put_decref("c", py_object_type)
code.putln(
"return r;")
code.putln(
"}")
def generate_descr_set_function(self, scope, code):
# Setting and deleting are both done through the __set__
# method of a descriptor, so we dispatch to user's __set__
# or __delete__ or raise an exception.
base_type = scope.parent_type.base_type
user_set_entry = scope.lookup_here("__set__")
user_del_entry = scope.lookup_here("__delete__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
scope.mangle_internal("tp_descr_set"))
code.putln(
"if (v) {")
if user_set_entry:
code.putln(
"return %s(o, i, v);" %
user_set_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_descr_set", "o, i, v", code)
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if user_del_entry:
code.putln(
"return %s(o, i);" %
user_del_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_descr_set", "o, i, v", code)
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__delete__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_property_accessors(self, cclass_scope, code):
for entry in cclass_scope.property_entries:
property_scope = entry.scope
if property_scope.defines_any(["__get__"]):
self.generate_property_get_function(entry, code)
if property_scope.defines_any(["__set__", "__del__"]):
self.generate_property_set_function(entry, code)
def generate_property_get_function(self, property_entry, code):
property_scope = property_entry.scope
property_entry.getter_cname = property_scope.parent_scope.mangle(
Naming.prop_get_prefix, property_entry.name)
get_entry = property_scope.lookup_here("__get__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, void *x) {" %
property_entry.getter_cname)
code.putln(
"return %s(o);" %
get_entry.func_cname)
code.putln(
"}")
def generate_property_set_function(self, property_entry, code):
property_scope = property_entry.scope
property_entry.setter_cname = property_scope.parent_scope.mangle(
Naming.prop_set_prefix, property_entry.name)
set_entry = property_scope.lookup_here("__set__")
del_entry = property_scope.lookup_here("__del__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *v, void *x) {" %
property_entry.setter_cname)
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, v);" %
set_entry.func_cname)
else:
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o);" %
del_entry.func_cname)
else:
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__del__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_typeobj_definition(self, modname, entry, code):
type = entry.type
scope = type.scope
for suite in TypeSlots.substructures:
suite.generate_substructure(scope, code)
code.putln("")
if entry.visibility == 'public':
header = "DL_EXPORT(PyTypeObject) %s = {"
else:
#header = "statichere PyTypeObject %s = {"
header = "PyTypeObject %s = {"
#code.putln(header % scope.parent_type.typeobj_cname)
code.putln(header % type.typeobj_cname)
code.putln(
"PyObject_HEAD_INIT(0)")
code.putln(
"0, /*ob_size*/")
code.putln(
'"%s.%s", /*tp_name*/' % (
self.full_module_name, scope.class_name))
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
#objstruct = "struct %s" % scope.parent_type.objstruct_cname
objstruct = "struct %s" % type.objstruct_cname
code.putln(
"sizeof(%s), /*tp_basicsize*/" %
objstruct)
code.putln(
"0, /*tp_itemsize*/")
for slot in TypeSlots.slot_table:
slot.generate(scope, code)
code.putln(
"};")
def generate_method_table(self, env, code):
code.putln("")
code.putln(
"static struct PyMethodDef %s[] = {" %
env.method_table_cname)
for entry in env.pyfunc_entries:
code.put_pymethoddef(entry, ",")
code.putln(
"{0, 0, 0, 0}")
code.putln(
"};")
def generate_member_table(self, env, code):
#print "ModuleNode.generate_member_table: scope =", env ###
if env.public_attr_entries:
code.putln("")
code.putln(
"static struct PyMemberDef %s[] = {" %
env.member_table_cname)
type = env.parent_type
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
for entry in env.public_attr_entries:
type_code = entry.type.pymemberdef_typecode
if entry.visibility == 'readonly':
flags = "READONLY"
else:
flags = "0"
code.putln('{"%s", %s, %s, %s, 0},' % (
entry.name,
type_code,
"offsetof(%s, %s)" % (objstruct, entry.cname),
flags))
code.putln(
"{0, 0, 0, 0, 0}")
code.putln(
"};")
def generate_getset_table(self, env, code):
if env.property_entries:
code.putln("")
code.putln(
"static struct PyGetSetDef %s[] = {" %
env.getset_table_cname)
for entry in env.property_entries:
code.putln(
'{"%s", %s, %s, %s, 0},' % (
entry.name,
entry.getter_cname or "0",
entry.setter_cname or "0",
entry.doc_cname or "0"))
code.putln(
"{0, 0, 0, 0, 0}")
code.putln(
"};")
def generate_interned_name_table(self, env, code):
code.mark_pos(None)
items = env.intern_map.items()
if items:
items.sort()
code.putln("")
code.putln(
"static __Pyx_InternTabEntry %s[] = {" %
Naming.intern_tab_cname)
for (name, cname) in items:
code.putln(
'{&%s, "%s"},' % (
cname,
name))
code.putln(
"{0, 0}")
code.putln(
"};")
def generate_py_string_table(self, env, code):
entries = env.all_pystring_entries
if entries:
code.putln("")
code.putln(
"static __Pyx_StringTabEntry %s[] = {" %
Naming.stringtab_cname)
for entry in entries:
code.putln(
"{&%s, %s, sizeof(%s), %d}," % (
entry.pystring_cname,
entry.cname,
entry.cname,
isinstance(entry.init, unicode)
))
code.putln(
"{0, 0, 0, 0}")
code.putln(
"};")
def generate_filename_init_prototype(self, code):
code.putln("");
code.putln("static void %s(void); /*proto*/" % Naming.fileinit_cname)
def generate_module_init_func(self, imported_modules, env, code):
code.putln("")
code.putln("PyMODINIT_FUNC init2%s(void);" % env.module_name)
code.putln("")
header = "PyMODINIT_FUNC init%s(void)" % env.module_name
code.putln("%s; /*proto*/" % header)
code.putln("%s {" % header)
# do we need any of these here, or just in init2?
code.put_var_declarations(env.temp_entries)
code.putln("/*--- Libary function declarations ---*/")
env.generate_library_function_declarations(code)
self.generate_filename_init_call(code)
code.putln("/*--- Module creation code ---*/")
self.generate_module_creation_code(env, code)
code.putln("/*--- Intern code ---*/")
self.generate_intern_code(env, code)
code.putln("/*--- String init code ---*/")
self.generate_string_init_code(env, code)
if Options.cache_builtins:
code.putln("/*--- Builtin init code ---*/")
self.generate_builtin_init_code(env, code)
code.putln("%s = PyTuple_New(0); %s" % (Naming.empty_tuple, code.error_goto_if_null(Naming.empty_tuple, self.pos)));
code.putln("%s = 0;" % Naming.skip_dispatch_cname);
code.putln("/*--- Global init code ---*/")
self.generate_global_init_code(env, code)
code.putln("/*--- Function export code ---*/")
self.generate_c_function_export_code(env, code)
env.use_utility_code(function_export_utility_code)
code.putln('if (__Pyx_ExportFunction("init2%s", (void*)init2%s, "int (void)") < 0) %s' % (env.module_name, env.module_name, code.error_goto((env.qualified_name,0,0) ) ) )
code.putln("/*--- Type init code ---*/")
self.generate_type_init_code(env, code)
code.putln("/*--- Type import code ---*/")
for module in imported_modules:
self.generate_type_import_code_for_module(module, env, code)
code.putln("/*--- Function import code ---*/")
for module in imported_modules:
self.generate_c_function_import_code_for_module(module, env, code)
env.use_utility_code(function_import_utility_code)
code.putln('init2%s();' % env.module_name)
if Options.generate_cleanup_code:
code.putln("if (__Pyx_RegisterCleanup()) %s;" % code.error_goto(self.pos))
code.putln("return;")
code.put_label(code.error_label)
code.put_var_xdecrefs(env.temp_entries)
code.putln('__Pyx_AddTraceback("%s");' % env.qualified_name)
env.use_utility_code(Nodes.traceback_utility_code)
code.putln('}')
def generate_module_init2_func(self, imported_modules, env, code):
code.putln("")
header = "PyMODINIT_FUNC init2%s(void)" % env.module_name
code.putln("%s; /*proto*/" % header)
code.putln("%s {" % header)
code.putln("static int __Pyx_unique = 0;")
code.putln("if (__Pyx_unique==1) return;")
code.putln("__Pyx_unique = 1;")
code.put_var_declarations(env.temp_entries)
code.putln("/*--- Execution code ---*/")
code.mark_pos(None)
self.body.generate_execution_code(code)
if Options.generate_cleanup_code:
code.putln("if (__Pyx_RegisterCleanup()) %s;" % code.error_goto(self.pos))
code.putln("return;")
code.put_label(code.error_label)
code.put_var_xdecrefs(env.temp_entries)
code.putln('__Pyx_AddTraceback("%s");' % env.qualified_name)
env.use_utility_code(Nodes.traceback_utility_code)
code.putln('}')
def generate_module_cleanup_func(self, env, code):
if not Options.generate_cleanup_code:
return
env.use_utility_code(import_module_utility_code)
env.use_utility_code(register_cleanup_utility_code)
code.putln()
code.putln('static PyObject* %s(PyObject *self, PyObject *unused) {' % Naming.cleanup_cname)
if Options.generate_cleanup_code >= 2:
code.putln("/*--- Global cleanup code ---*/")
rev_entries = list(env.var_entries)
rev_entries.reverse()
for entry in rev_entries:
if entry.visibility != 'extern':
if entry.type.is_pyobject:
code.put_var_decref_clear(entry)
if Options.generate_cleanup_code >= 3:
code.putln("/*--- Type import cleanup code ---*/")
for type, _ in env.types_imported.items():
code.put_decref("((PyObject*)%s)" % type.typeptr_cname, PyrexTypes.py_object_type)
if Options.cache_builtins:
code.putln("/*--- Builtin cleanup code ---*/")
for entry in env.cached_builtins:
code.put_var_decref_clear(entry)
code.putln("Py_DECREF(%s); %s = 0;" % (Naming.empty_tuple, Naming.empty_tuple));
code.putln("/*--- Intern cleanup code ---*/")
for entry in env.pynum_entries:
code.put_var_decref_clear(entry)
if env.intern_map:
for name, cname in env.intern_map.items():
code.put_decref_clear(cname, PyrexTypes.py_object_type)
code.putln("Py_INCREF(Py_None); return Py_None;")
code.putln('}')
def generate_filename_init_call(self, code):
code.putln("%s();" % Naming.fileinit_cname)
def generate_module_creation_code(self, env, code):
# Generate code to create the module object and
# install the builtins.
if env.doc:
doc = env.doc_cname
else:
doc = "0"
code.putln(
'%s = Py_InitModule4("%s", %s, %s, 0, PYTHON_API_VERSION);' % (
env.module_cname,
env.module_name,
env.method_table_cname,
doc))
code.putln(
"if (!%s) %s;" % (
env.module_cname,
code.error_goto(self.pos)));
code.putln(
'%s = PyImport_AddModule("__builtin__");' %
Naming.builtins_cname)
code.putln(
"if (!%s) %s;" % (
Naming.builtins_cname,
code.error_goto(self.pos)));
code.putln(
'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
env.module_cname,
Naming.builtins_cname,
code.error_goto(self.pos)))
if Options.pre_import is not None:
code.putln(
'%s = PyImport_AddModule("%s");' % (
Naming.preimport_cname,
Options.pre_import))
code.putln(
"if (!%s) %s;" % (
Naming.preimport_cname,
code.error_goto(self.pos)));
def generate_intern_code(self, env, code):
for entry in env.pynum_entries:
code.putln("%s = PyInt_FromLong(%s); %s;" % (
entry.cname,
entry.init,
code.error_goto_if_null(entry.cname, self.pos)))
if env.intern_map:
env.use_utility_code(Nodes.init_intern_tab_utility_code);
code.putln(
"if (__Pyx_InternStrings(%s) < 0) %s;" % (
Naming.intern_tab_cname,
code.error_goto(self.pos)))
def generate_string_init_code(self, env, code):
if env.all_pystring_entries:
env.use_utility_code(Nodes.init_string_tab_utility_code)
code.putln(
"if (__Pyx_InitStrings(%s) < 0) %s;" % (
Naming.stringtab_cname,
code.error_goto(self.pos)))
def generate_builtin_init_code(self, env, code):
# Lookup and cache builtin objects.
if Options.cache_builtins:
for entry in env.cached_builtins:
if Options.intern_names:
#assert entry.interned_cname is not None
code.putln(
'%s = __Pyx_GetName(%s, %s); if (!%s) %s' % (
entry.cname,
Naming.builtins_cname,
entry.interned_cname,
entry.cname,
code.error_goto(entry.pos)))
else:
code.putln(
'%s = __Pyx_GetName(%s, "%s"); if (!%s) %s' % (
entry.cname,
Naming.builtins_cname,
entry.name,
entry.cname,
code.error_goto(entry.pos)))
def generate_global_init_code(self, env, code):
# Generate code to initialise global PyObject *
# variables to None.
for entry in env.var_entries:
if entry.visibility != 'extern':
if entry.type.is_pyobject:
code.put_init_var_to_py_none(entry)
def generate_c_function_export_code(self, env, code):
# Generate code to create PyCFunction wrappers for exported C functions.
for entry in env.cfunc_entries:
if entry.api or entry.defined_in_pxd:
env.use_utility_code(function_export_utility_code)
signature = entry.type.signature_string()
code.putln('if (__Pyx_ExportFunction("%s", (void*)%s, "%s") < 0) %s' % (
entry.name,
entry.cname,
signature,
code.error_goto(self.pos)))
def generate_type_import_code_for_module(self, module, env, code):
# Generate type import code for all exported extension types in
# an imported module.
#if module.c_class_entries:
for entry in module.c_class_entries:
if entry.defined_in_pxd:
self.generate_type_import_code(env, entry.type, entry.pos, code)
def generate_c_function_import_code_for_module(self, module, env, code):
# Generate import code for all exported C functions in a cimported module.
entries = []
for entry in module.cfunc_entries:
if entry.defined_in_pxd:
entries.append(entry)
if entries:
env.use_utility_code(import_module_utility_code)
env.use_utility_code(function_import_utility_code)
temp = self.module_temp_cname
code.putln(
'%s = __Pyx_ImportModule("%s"); if (!%s) %s' % (
temp,
module.qualified_name,
temp,
code.error_goto(self.pos)))
for entry in entries:
code.putln(
'if (__Pyx_ImportFunction(%s, "%s", (void**)&%s, "%s") < 0) %s' % (
temp,
entry.name,
entry.cname,
entry.type.signature_string(),
code.error_goto(self.pos)))
code.putln("Py_DECREF(%s); %s = 0;" % (temp, temp))
def generate_type_init_code(self, env, code):
# Generate type import code for extern extension types
# and type ready code for non-extern ones.
for entry in env.c_class_entries:
if entry.visibility == 'extern':
self.generate_type_import_code(env, entry.type, entry.pos, code)
else:
self.generate_base_type_import_code(env, entry, code)
self.generate_exttype_vtable_init_code(entry, code)
self.generate_type_ready_code(env, entry, code)
self.generate_typeptr_assignment_code(entry, code)
def generate_base_type_import_code(self, env, entry, code):
base_type = entry.type.base_type
if base_type and base_type.module_name != env.qualified_name:
self.generate_type_import_code(env, base_type, self.pos, code)
def use_type_import_utility_code(self, env):
import ExprNodes
env.use_utility_code(type_import_utility_code)
env.use_utility_code(import_module_utility_code)
def generate_type_import_code(self, env, type, pos, code):
# If not already done, generate code to import the typeobject of an
# extension type defined in another module, and extract its C method
# table pointer if any.
if type in env.types_imported:
return
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
self.generate_type_import_call(type, code,
code.error_goto_if_null(type.typeptr_cname, pos))
self.use_type_import_utility_code(env)
if type.vtabptr_cname:
code.putln(
"if (__Pyx_GetVtable(%s->tp_dict, &%s) < 0) %s" % (
type.typeptr_cname,
type.vtabptr_cname,
code.error_goto(pos)))
env.use_utility_code(Nodes.get_vtable_utility_code)
env.types_imported[type] = 1
def generate_type_import_call(self, type, code, error_code):
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
code.putln('%s = __Pyx_ImportType("%s", "%s", sizeof(%s)); %s' % (
type.typeptr_cname,
type.module_name,
type.name,
objstruct,
error_code))
def generate_type_ready_code(self, env, entry, code):
# Generate a call to PyType_Ready for an extension
# type defined in this module.
type = entry.type
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility != 'extern':
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
"if (PyType_Ready(&%s) < 0) %s" % (
typeobj_cname,
code.error_goto(entry.pos)))
if type.vtable_cname:
code.putln(
"if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
typeobj_cname,
type.vtabptr_cname,
code.error_goto(entry.pos)))
env.use_utility_code(Nodes.set_vtable_utility_code)
code.putln(
'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s' % (
Naming.module_cname,
scope.class_name,
typeobj_cname,
code.error_goto(entry.pos)))
weakref_entry = scope.lookup_here("__weakref__")
if weakref_entry:
if weakref_entry.type is py_object_type:
tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
code.putln("if (%s == 0) %s = offsetof(struct %s, %s);" % (
tp_weaklistoffset,
tp_weaklistoffset,
type.objstruct_cname,
weakref_entry.cname))
else:
error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
def generate_exttype_vtable_init_code(self, entry, code):
# Generate code to initialise the C method table of an
# extension type.
type = entry.type
if type.vtable_cname:
code.putln(
"%s = &%s;" % (
type.vtabptr_cname,
type.vtable_cname))
if type.base_type and type.base_type.vtabptr_cname:
code.putln(
"%s.%s = *%s;" % (
type.vtable_cname,
Naming.obj_base_cname,
type.base_type.vtabptr_cname))
for meth_entry in type.scope.cfunc_entries:
if meth_entry.func_cname:
code.putln(
"*(void(**)(void))&%s.%s = (void(*)(void))%s;" % (
type.vtable_cname,
meth_entry.cname,
meth_entry.func_cname))
def generate_typeptr_assignment_code(self, entry, code):
# Generate code to initialise the typeptr of an extension
# type defined in this module to point to its type object.
type = entry.type
if type.typeobj_cname:
code.putln(
"%s = &%s;" % (
type.typeptr_cname, type.typeobj_cname))
def generate_utility_functions(self, env, code):
code.putln("")
code.putln("/* Runtime support code */")
code.putln("")
code.putln("static void %s(void) {" % Naming.fileinit_cname)
code.putln("%s = %s;" %
(Naming.filetable_cname, Naming.filenames_cname))
code.putln("}")
for utility_code in env.utility_code_used:
code.h.put(utility_code[0])
code.put(utility_code[1])
code.put(PyrexTypes.type_conversion_functions)
#------------------------------------------------------------------------------------
#
# Runtime support code
#
#------------------------------------------------------------------------------------
call_module_function_code = [
"""
static PyObject *__Pyx_CallModuleFunction(char* module, char *name); /*proto*/
""","""
static PyObject *__Pyx_CallModuleFunction(char* module, char *name)
{
PyObject* py_name = 0;
PyObject* py_module_name = 0;
PyObject* py_module = 0;
PyObject* py_dict = 0;
PyObject* py_func = 0;
PyObject* py_tuple = PyTuple_New(0);
PyObject* ret = 0;
py_dict = PyImport_GetModuleDict();
if(py_dict == 0)
goto bad;
if(py_tuple == 0)
goto bad;
py_name = PyString_FromString(name);
if(py_name == 0)
goto bad;
py_module_name = PyString_FromString(module);
if(py_module_name == 0)
goto bad;
py_module = PyObject_GetItem(py_dict, py_module_name); /* look up by name string, not the still-null module pointer */
if(py_module == 0)
goto bad;
if ( (py_func = PyObject_GetAttr(py_module, py_name) ) == 0)
goto bad;
if ( (ret = PyObject_Call(py_func, py_tuple,NULL) ) == 0)
goto bad;
return ret;
bad:
Py_XDECREF(py_name);
Py_XDECREF(py_module_name);
Py_XDECREF(py_module);
Py_XDECREF(py_tuple); /* py_dict is a borrowed reference; py_tuple is owned */
Py_XDECREF(py_func);
return 0;
}
"""]
import_module_utility_code = [
"""
static PyObject *__Pyx_ImportModule(char *name); /*proto*/
""","""
#ifndef __PYX_HAVE_RT_ImportModule
#define __PYX_HAVE_RT_ImportModule
static PyObject *__Pyx_ImportModule(char *name) {
PyObject *py_name = 0;
PyObject *py_module = 0;
py_name = PyString_FromString(name);
if (!py_name)
goto bad;
py_module = PyImport_Import(py_name);
Py_DECREF(py_name);
return py_module;
bad:
Py_XDECREF(py_name);
return 0;
}
#endif
"""]
#------------------------------------------------------------------------------------
type_import_utility_code = [
"""
static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/
""","""
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name,
long size)
{
PyObject *py_module = 0;
PyObject *result = 0;
PyObject *py_name = 0;
py_name = PyString_FromString(module_name);
if (!py_name)
goto bad;
py_module = __Pyx_ImportModule(module_name);
if (!py_module)
goto bad;
result = PyObject_GetAttrString(py_module, class_name);
if (!result)
goto bad;
if (!PyType_Check(result)) {
PyErr_Format(PyExc_TypeError,
"%s.%s is not a type object",
module_name, class_name);
goto bad;
}
if (((PyTypeObject *)result)->tp_basicsize != size) {
PyErr_Format(PyExc_ValueError,
"%s.%s does not appear to be the correct type object",
module_name, class_name);
goto bad;
}
return (PyTypeObject *)result;
bad:
Py_XDECREF(py_name);
Py_XDECREF(result);
return 0;
}
#endif
"""]
#------------------------------------------------------------------------------------
function_export_utility_code = [
"""
static int __Pyx_ExportFunction(char *n, void *f, char *s); /*proto*/
""",r"""
static int __Pyx_ExportFunction(char *name, void *f, char *sig) {
PyObject *d = 0;
PyObject *p = 0;
d = PyObject_GetAttrString(%(MODULE)s, "%(API)s");
if (!d) {
PyErr_Clear();
d = PyDict_New();
if (!d)
goto bad;
Py_INCREF(d);
if (PyModule_AddObject(%(MODULE)s, "%(API)s", d) < 0)
goto bad;
}
p = PyCObject_FromVoidPtrAndDesc(f, sig, 0);
if (!p)
goto bad;
if (PyDict_SetItemString(d, name, p) < 0)
goto bad;
Py_DECREF(d);
return 0;
bad:
Py_XDECREF(p);
Py_XDECREF(d);
return -1;
}
""" % {'MODULE': Naming.module_cname, 'API': Naming.api_name}]
#------------------------------------------------------------------------------------
function_import_utility_code = [
"""
static int __Pyx_ImportFunction(PyObject *module, char *funcname, void **f, char *sig); /*proto*/
""","""
#ifndef __PYX_HAVE_RT_ImportFunction
#define __PYX_HAVE_RT_ImportFunction
static int __Pyx_ImportFunction(PyObject *module, char *funcname, void **f, char *sig) {
PyObject *d = 0;
PyObject *cobj = 0;
char *desc;
d = PyObject_GetAttrString(module, "%(API)s");
if (!d)
goto bad;
cobj = PyDict_GetItemString(d, funcname);
if (!cobj) {
PyErr_Format(PyExc_ImportError,
"%%s does not export expected C function %%s",
PyModule_GetName(module), funcname);
goto bad;
}
desc = (char *)PyCObject_GetDesc(cobj);
if (!desc)
goto bad;
if (strcmp(desc, sig) != 0) {
PyErr_Format(PyExc_TypeError,
"C function %%s.%%s has wrong signature (expected %%s, got %%s)",
PyModule_GetName(module), funcname, sig, desc);
goto bad;
}
*f = PyCObject_AsVoidPtr(cobj);
Py_DECREF(d);
return 0;
bad:
Py_XDECREF(d);
return -1;
}
#endif
""" % dict(API = Naming.api_name)]
register_cleanup_utility_code = [
"""
static int __Pyx_RegisterCleanup(void); /*proto*/
static PyObject* __pyx_module_cleanup(PyObject *self, PyObject *unused); /*proto*/
static PyMethodDef cleanup_def = {"__cleanup", (PyCFunction)&__pyx_module_cleanup, METH_NOARGS, 0};
""","""
static int __Pyx_RegisterCleanup(void) {
/* Don't use Py_AtExit because that has a 32-call limit
* and is called after python finalization.
*/
PyObject *cleanup_func = 0;
PyObject *atexit = 0;
PyObject *reg = 0;
PyObject *args = 0;
PyObject *res = 0;
int ret = -1;
cleanup_func = PyCFunction_New(&cleanup_def, 0);
args = PyTuple_New(1);
if (!cleanup_func || !args)
goto bad;
PyTuple_SET_ITEM(args, 0, cleanup_func);
cleanup_func = 0;
atexit = __Pyx_ImportModule("atexit");
if (!atexit)
goto bad;
reg = PyObject_GetAttrString(atexit, "register");
if (!reg)
goto bad;
res = PyObject_CallObject(reg, args);
if (!res)
goto bad;
ret = 0;
bad:
Py_XDECREF(cleanup_func);
Py_XDECREF(atexit);
Py_XDECREF(reg);
Py_XDECREF(args);
Py_XDECREF(res);
return ret;
}
"""]
#
# Pyrex - C naming conventions
#
#
# Prefixes for generating C names.
# Collected here to facilitate ensuring uniqueness.
#
pyrex_prefix = "__pyx_"
builtin_prefix = pyrex_prefix + "builtin_"
arg_prefix = pyrex_prefix + "arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
pyfunc_prefix = pyrex_prefix + "pf_"
gstab_prefix = pyrex_prefix + "getsets_"
prop_get_prefix = pyrex_prefix + "getprop_"
const_prefix = pyrex_prefix + "k"
label_prefix = pyrex_prefix + "L"
pymethdef_prefix = pyrex_prefix + "mdef_"
methtab_prefix = pyrex_prefix + "methods_"
memtab_prefix = pyrex_prefix + "members_"
interned_prefix = pyrex_prefix + "n_"
interned_num_prefix = pyrex_prefix + "int_"
objstruct_prefix = pyrex_prefix + "obj_"
typeptr_prefix = pyrex_prefix + "ptype_"
prop_set_prefix = pyrex_prefix + "setprop_"
type_prefix = pyrex_prefix + "t_"
typeobj_prefix = pyrex_prefix + "type_"
var_prefix = pyrex_prefix + "v_"
vtable_prefix = pyrex_prefix + "vtable_"
vtabptr_prefix = pyrex_prefix + "vtabptr_"
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
opt_arg_prefix = pyrex_prefix + "opt_args_"
args_cname = pyrex_prefix + "args"
kwdlist_cname = pyrex_prefix + "argnames"
obj_base_cname = pyrex_prefix + "base"
builtins_cname = pyrex_prefix + "b"
preimport_cname = pyrex_prefix + "i"
moddict_cname = pyrex_prefix + "d"
dummy_cname = pyrex_prefix + "dummy"
filename_cname = pyrex_prefix + "filename"
filetable_cname = pyrex_prefix + "f"
filenames_cname = pyrex_prefix + "filenames"
fileinit_cname = pyrex_prefix + "init_filenames"
intern_tab_cname = pyrex_prefix + "intern_tab"
kwds_cname = pyrex_prefix + "kwds"
lineno_cname = pyrex_prefix + "lineno"
clineno_cname = pyrex_prefix + "clineno"
cfilenm_cname = pyrex_prefix + "cfilenm"
module_cname = pyrex_prefix + "m"
moddoc_cname = pyrex_prefix + "mdoc"
methtable_cname = pyrex_prefix + "methods"
retval_cname = pyrex_prefix + "r"
reqd_kwds_cname = pyrex_prefix + "reqd_kwds"
self_cname = pyrex_prefix + "self"
stringtab_cname = pyrex_prefix + "string_tab"
vtabslot_cname = pyrex_prefix + "vtab"
c_api_tab_cname = pyrex_prefix + "c_api_tab"
gilstate_cname = pyrex_prefix + "state"
skip_dispatch_cname = pyrex_prefix + "skip_dispatch"
empty_tuple = pyrex_prefix + "empty_tuple"
cleanup_cname = pyrex_prefix + "module_cleanup"
optional_args_cname = pyrex_prefix + "optional_args"
no_opt_args = pyrex_prefix + "no_opt_args"
line_c_macro = "__LINE__"
file_c_macro = "__FILE__"
extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"
exc_type_name = pyrex_prefix + "exc_type"
exc_value_name = pyrex_prefix + "exc_value"
exc_tb_name = pyrex_prefix + "exc_tb"
exc_lineno_name = pyrex_prefix + "exc_lineno"
exc_vars = (exc_type_name, exc_value_name, exc_tb_name)
api_name = pyrex_prefix + "capi__"
h_guard_prefix = "__PYX_HAVE__"
api_guard_prefix = "__PYX_HAVE_API__"
api_func_guard = "__PYX_HAVE_API_FUNC_"
def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
return (major << 24) | (minor << 16) | (micro << 8) | (release_level << 4) | (release_serial)
#
# Pyrex - Compilation-wide options
#
intern_names = 1 # Intern global variable and attribute names
cache_builtins = 1 # Perform lookups on builtin names only once
embed_pos_in_docstring = 0
gcc_branch_hints = 1
pre_import = None
docstrings = True
# This is a SAGE-specific option that will
# cause Cython to incref local variables before
# performing a binary operation on them, for
# safe detection of inplace operators.
incref_local_binop = 0
# Decref global variables in this module on exit for garbage collection.
# 0: none, 1+: interned objects, 2+: cdef globals, 3+: type objects
# Mostly useful for reducing noise under Valgrind; only executes at process
# exit (when all memory will be reclaimed anyway).
generate_cleanup_code = 0
annotate = 0
# This will convert statements of the form "for i in range(...)"
# to "for i from ..." when i is a cdef'd integer type, and the direction
# (i.e. sign of step) can be determined.
# WARNING: This may change the semantics if the range causes assignment to
# i to overflow. Specifically, if this option is set, an error will be
# raised before the loop is entered, whereas without this option the loop
# will execute until an overflowing value is encountered.
convert_range = 0
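# Illustratively (hypothetical snippet), with convert_range enabled a loop such as
#   cdef int i
#   for i in range(n): ...
# would be compiled as if written with the Pyrex integer-loop form
#   for i from 0 <= i < n: ...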
# Enable this to allow writing your_module.foo = ... to overwrite the
# definition of the cpdef function foo, at the cost of an extra dictionary
# lookup on every call.
# If this is 0, it simply creates a wrapper.
lookup_module_cpdef = 0
# This will set local variables to None rather than NULL, which may suppress
# what would be an UnboundLocalError in pure Python, but eliminates checking
# for NULL on every use and allows a plain decref rather than an xdecref at the end.
# WARNING: This is a work in progress, may currently segfault.
init_local_none = 1
# Optimize no-argument and one-argument methods by using the METH_NOARGS and METH_O
# calling conventions respectively. These are faster calling conventions, but disallow the use of
# keywords (which, admittedly, are of little use in these cases).
optimize_simple_methods = 1
# Append the c file and line number to the traceback for exceptions.
c_line_in_traceback = 0
#
# Pyrex Parser
#
import os, re
from string import join, replace
from types import ListType, TupleType
from Scanning import PyrexScanner
import Nodes
import ExprNodes
from ModuleNode import ModuleNode
from Errors import error, InternalError
def p_ident(s, message = "Expected an identifier"):
if s.sy == 'IDENT':
name = s.systring
s.next()
return name
else:
s.error(message)
def p_ident_list(s):
names = []
while s.sy == 'IDENT':
names.append(s.systring)
s.next()
if s.sy != ',':
break
s.next()
return names
#------------------------------------------
#
# Expressions
#
#------------------------------------------
def p_binop_expr(s, ops, p_sub_expr):
#print "p_binop_expr:", ops, p_sub_expr ###
n1 = p_sub_expr(s)
#print "p_binop_expr(%s):" % p_sub_expr, s.sy ###
while s.sy in ops:
op = s.sy
pos = s.position()
s.next()
n2 = p_sub_expr(s)
n1 = ExprNodes.binop_node(pos, op, n1, n2)
return n1
#expression: or_test [if or_test else test] | lambda_form
def p_simple_expr(s):
pos = s.position()
expr = p_or_test(s)
if s.sy == 'if':
s.next()
test = p_or_test(s)
if s.sy == 'else':
s.next()
other = p_test(s)
return ExprNodes.CondExprNode(pos, test=test, true_val=expr, false_val=other)
else:
s.error("Expected 'else'")
else:
return expr
#test: or_test | lambda_form
def p_test(s):
return p_or_test(s)
#or_test: and_test ('or' and_test)*
def p_or_test(s):
#return p_binop_expr(s, ('or',), p_and_test)
return p_rassoc_binop_expr(s, ('or',), p_and_test)
def p_rassoc_binop_expr(s, ops, p_subexpr):
n1 = p_subexpr(s)
if s.sy in ops:
pos = s.position()
op = s.sy
s.next()
n2 = p_rassoc_binop_expr(s, ops, p_subexpr)
n1 = ExprNodes.binop_node(pos, op, n1, n2)
return n1
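# Unlike p_binop_expr above, this builds a right-associated tree: "a or b or c"
# parses as or(a, or(b, c)) rather than or(or(a, b), c). For short-circuiting
# 'and'/'or' the two shapes are semantically equivalent.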
#and_test: not_test ('and' not_test)*
def p_and_test(s):
#return p_binop_expr(s, ('and',), p_not_test)
return p_rassoc_binop_expr(s, ('and',), p_not_test)
#not_test: 'not' not_test | comparison
def p_not_test(s):
if s.sy == 'not':
pos = s.position()
s.next()
return ExprNodes.NotNode(pos, operand = p_not_test(s))
else:
return p_comparison(s)
#comparison: expr (comp_op expr)*
#comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
def p_comparison(s):
n1 = p_bit_expr(s)
if s.sy in comparison_ops:
pos = s.position()
op = p_cmp_op(s)
n2 = p_bit_expr(s)
n1 = ExprNodes.PrimaryCmpNode(pos,
operator = op, operand1 = n1, operand2 = n2)
if s.sy in comparison_ops:
n1.cascade = p_cascaded_cmp(s)
return n1
def p_cascaded_cmp(s):
pos = s.position()
op = p_cmp_op(s)
n2 = p_bit_expr(s)
result = ExprNodes.CascadedCmpNode(pos,
operator = op, operand2 = n2)
if s.sy in comparison_ops:
result.cascade = p_cascaded_cmp(s)
return result
def p_cmp_op(s):
if s.sy == 'not':
s.next()
s.expect('in')
op = 'not_in'
elif s.sy == 'is':
s.next()
if s.sy == 'not':
s.next()
op = 'is_not'
else:
op = 'is'
else:
op = s.sy
s.next()
if op == '<>':
op = '!='
return op
comparison_ops = (
'<', '>', '==', '>=', '<=', '<>', '!=',
'in', 'is', 'not'
)
#expr: xor_expr ('|' xor_expr)*
def p_bit_expr(s):
return p_binop_expr(s, ('|',), p_xor_expr)
#xor_expr: and_expr ('^' and_expr)*
def p_xor_expr(s):
return p_binop_expr(s, ('^',), p_and_expr)
#and_expr: shift_expr ('&' shift_expr)*
def p_and_expr(s):
return p_binop_expr(s, ('&',), p_shift_expr)
#shift_expr: arith_expr (('<<'|'>>') arith_expr)*
def p_shift_expr(s):
return p_binop_expr(s, ('<<', '>>'), p_arith_expr)
#arith_expr: term (('+'|'-') term)*
def p_arith_expr(s):
return p_binop_expr(s, ('+', '-'), p_term)
#term: factor (('*'|'/'|'%') factor)*
def p_term(s):
return p_binop_expr(s, ('*', '/', '%', '//'), p_factor)
#factor: ('+'|'-'|'~'|'&'|typecast|sizeof) factor | power
def p_factor(s):
sy = s.sy
if sy in ('+', '-', '~'):
op = s.sy
pos = s.position()
s.next()
return ExprNodes.unop_node(pos, op, p_factor(s))
elif sy == '&':
pos = s.position()
s.next()
arg = p_factor(s)
return ExprNodes.AmpersandNode(pos, operand = arg)
elif sy == "<":
return p_typecast(s)
elif sy == 'IDENT' and s.systring == "sizeof":
return p_sizeof(s)
else:
return p_power(s)
def p_typecast(s):
# s.sy == "<"
pos = s.position()
s.next()
base_type = p_c_base_type(s)
declarator = p_c_declarator(s, empty = 1)
if s.sy == '?':
s.next()
typecheck = 1
else:
typecheck = 0
s.expect(">")
operand = p_factor(s)
return ExprNodes.TypecastNode(pos,
base_type = base_type,
declarator = declarator,
operand = operand,
typecheck = typecheck)
def p_sizeof(s):
# s.sy == ident "sizeof"
pos = s.position()
s.next()
s.expect('(')
if looking_at_type(s) or looking_at_dotted_name(s):
base_type = p_c_base_type(s)
declarator = p_c_declarator(s, empty = 1)
node = ExprNodes.SizeofTypeNode(pos,
base_type = base_type, declarator = declarator)
else:
operand = p_simple_expr(s)
node = ExprNodes.SizeofVarNode(pos, operand = operand)
s.expect(')')
return node
#power: atom trailer* ('**' factor)*
def p_power(s):
n1 = p_atom(s)
while s.sy in ('(', '[', '.'):
n1 = p_trailer(s, n1)
if s.sy == '**':
pos = s.position()
s.next()
n2 = p_factor(s)
n1 = ExprNodes.binop_node(pos, '**', n1, n2)
return n1
#trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
def p_trailer(s, node1):
pos = s.position()
if s.sy == '(':
return p_call(s, node1)
elif s.sy == '[':
return p_index(s, node1)
else: # s.sy == '.'
s.next()
name = p_ident(s)
return ExprNodes.AttributeNode(pos,
obj = node1, attribute = name)
# arglist: argument (',' argument)* [',']
# argument: [test '='] test # Really [keyword '='] test
def p_call(s, function):
# s.sy == '('
pos = s.position()
s.next()
positional_args = []
keyword_args = []
star_arg = None
starstar_arg = None
while s.sy not in ('*', '**', ')'):
arg = p_simple_expr(s)
if s.sy == '=':
s.next()
if not arg.is_name:
s.error("Expected an identifier before '='",
pos = arg.pos)
keyword = ExprNodes.StringNode(arg.pos,
value = arg.name)
arg = p_simple_expr(s)
keyword_args.append((keyword, arg))
else:
if keyword_args:
s.error("Non-keyword arg following keyword arg",
pos = arg.pos)
positional_args.append(arg)
if s.sy != ',':
break
s.next()
if s.sy == '*':
s.next()
star_arg = p_simple_expr(s)
if s.sy == ',':
s.next()
if s.sy == '**':
s.next()
starstar_arg = p_simple_expr(s)
if s.sy == ',':
s.next()
s.expect(')')
if not (keyword_args or star_arg or starstar_arg):
return ExprNodes.SimpleCallNode(pos,
function = function,
args = positional_args)
else:
arg_tuple = None
keyword_dict = None
if positional_args or not star_arg:
arg_tuple = ExprNodes.TupleNode(pos,
args = positional_args)
if star_arg:
star_arg_tuple = ExprNodes.AsTupleNode(pos, arg = star_arg)
if arg_tuple:
arg_tuple = ExprNodes.binop_node(pos,
operator = '+', operand1 = arg_tuple,
operand2 = star_arg_tuple)
else:
arg_tuple = star_arg_tuple
if keyword_args:
keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
for key, value in keyword_args]
keyword_dict = ExprNodes.DictNode(pos,
key_value_pairs = keyword_args)
return ExprNodes.GeneralCallNode(pos,
function = function,
positional_args = arg_tuple,
keyword_args = keyword_dict,
starstar_arg = starstar_arg)
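# Roughly: "f(a, b)" becomes a SimpleCallNode with args [a, b], while
# "f(a, x=1, *rest)" becomes a GeneralCallNode whose positional_args is
# TupleNode([a]) '+' AsTupleNode(rest) and whose keyword_args is a DictNode
# holding the {'x': 1} item.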
#lambdef: 'lambda' [varargslist] ':' test
#subscriptlist: subscript (',' subscript)* [',']
def p_index(s, base):
# s.sy == '['
pos = s.position()
s.next()
subscripts = p_subscript_list(s)
if len(subscripts) == 1 and len(subscripts[0]) == 2:
start, stop = subscripts[0]
result = ExprNodes.SliceIndexNode(pos,
base = base, start = start, stop = stop)
else:
indexes = make_slice_nodes(pos, subscripts)
if len(indexes) == 1:
index = indexes[0]
else:
index = ExprNodes.TupleNode(pos, args = indexes)
result = ExprNodes.IndexNode(pos,
base = base, index = index)
s.expect(']')
return result
def p_subscript_list(s):
items = [p_subscript(s)]
while s.sy == ',':
s.next()
if s.sy == ']':
break
items.append(p_subscript(s))
return items
#subscript: '.' '.' '.' | test | [test] ':' [test] [':' [test]]
def p_subscript(s):
# Parse a subscript and return a list of
# 1, 2 or 3 ExprNodes, depending on how
# many slice elements were encountered.
pos = s.position()
if s.sy == '.':
expect_ellipsis(s)
return [ExprNodes.EllipsisNode(pos)]
else:
start = p_slice_element(s, (':',))
if s.sy != ':':
return [start]
s.next()
stop = p_slice_element(s, (':', ',', ']'))
if s.sy != ':':
return [start, stop]
s.next()
step = p_slice_element(s, (':', ',', ']'))
return [start, stop, step]
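# Roughly: the subscript in "x[1:2:3]" yields [IntNode, IntNode, IntNode],
# "x[1:2]" yields [IntNode, IntNode], "x[:2]" yields [None, IntNode] (missing
# slice elements come back as None), and "x[...]" yields [EllipsisNode].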
def p_slice_element(s, follow_set):
# Simple expression which may be missing iff
# it is followed by something in follow_set.
if s.sy not in follow_set:
return p_simple_expr(s)
else:
return None
def expect_ellipsis(s):
s.expect('.')
s.expect('.')
s.expect('.')
def make_slice_nodes(pos, subscripts):
# Convert a list of subscripts as returned
# by p_subscript_list into a list of ExprNodes,
# creating SliceNodes for elements with 2 or
# more components.
result = []
for subscript in subscripts:
if len(subscript) == 1:
result.append(subscript[0])
else:
result.append(make_slice_node(pos, *subscript))
return result
def make_slice_node(pos, start, stop = None, step = None):
if not start:
start = ExprNodes.NoneNode(pos)
if not stop:
stop = ExprNodes.NoneNode(pos)
if not step:
step = ExprNodes.NoneNode(pos)
return ExprNodes.SliceNode(pos,
start = start, stop = stop, step = step)
#atom: '(' [testlist] ')' | '[' [listmaker] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING+
def p_atom(s):
pos = s.position()
sy = s.sy
if sy == '(':
s.next()
if s.sy == ')':
result = ExprNodes.TupleNode(pos, args = [])
else:
result = p_expr(s)
s.expect(')')
return result
elif sy == '[':
return p_list_maker(s)
elif sy == '{':
return p_dict_maker(s)
elif sy == '`':
return p_backquote_expr(s)
elif sy == 'INT':
value = s.systring
s.next()
return ExprNodes.IntNode(pos, value = value)
elif sy == 'LONG':
value = s.systring
s.next()
return ExprNodes.LongNode(pos, value = value)
elif sy == 'FLOAT':
value = s.systring
s.next()
return ExprNodes.FloatNode(pos, value = value)
elif sy == 'IMAG':
value = s.systring[:-1]
s.next()
return ExprNodes.ImagNode(pos, value = value)
elif sy == 'STRING' or sy == 'BEGIN_STRING':
kind, value = p_cat_string_literal(s)
if kind == 'c':
return ExprNodes.CharNode(pos, value = value)
else:
return ExprNodes.StringNode(pos, value = value)
elif sy == 'IDENT':
name = s.systring
s.next()
if name == "None":
return ExprNodes.NoneNode(pos)
elif name == "True":
return ExprNodes.BoolNode(pos, value=True)
elif name == "False":
return ExprNodes.BoolNode(pos, value=False)
else:
return p_name(s, name)
elif sy == 'NULL':
s.next()
return ExprNodes.NullNode(pos)
else:
s.error("Expected an identifier or literal")
def p_name(s, name):
pos = s.position()
if not s.compile_time_expr:
try:
value = s.compile_time_env.lookup_here(name)
except KeyError:
pass
else:
rep = repr(value)
if isinstance(value, bool):
return ExprNodes.BoolNode(pos, value = value)
elif isinstance(value, int):
return ExprNodes.IntNode(pos, value = rep)
elif isinstance(value, long):
return ExprNodes.LongNode(pos, value = rep)
elif isinstance(value, float):
return ExprNodes.FloatNode(pos, value = rep)
elif isinstance(value, str):
return ExprNodes.StringNode(pos, value = rep[1:-1])
else:
error(pos, "Invalid type for compile-time constant: %s"
% value.__class__.__name__)
return ExprNodes.NameNode(pos, name = name)
def p_cat_string_literal(s):
# A sequence of one or more adjacent string literals.
    # Returns (kind, value) where kind in ('', 'c', 'r', 'u')
kind, value = p_string_literal(s)
if kind != 'c':
strings = [value]
while s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
next_kind, next_value = p_string_literal(s)
if next_kind == 'c':
                s.error(
                    "Cannot concatenate char literal with another string or char literal")
strings.append(next_value)
value = ''.join(strings)
return kind, value
def p_opt_string_literal(s):
if s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
return p_string_literal(s)
else:
return None
def p_string_literal(s):
# A single string or char literal.
# Returns (kind, value) where kind in ('', 'c', 'r', 'u')
    if s.sy == 'STRING':
        value = unquote(s.systring)
        s.next()
        # unquote() has already processed any prefix, so no kind to report
        return '', value
# s.sy == 'BEGIN_STRING'
pos = s.position()
#is_raw = s.systring[:1].lower() == "r"
kind = s.systring[:1].lower()
if kind not in "cru":
kind = ''
chars = []
while 1:
s.next()
sy = s.sy
#print "p_string_literal: sy =", sy, repr(s.systring) ###
if sy == 'CHARS':
systr = s.systring
if len(systr) == 1 and systr in "'\"\n":
chars.append('\\')
if kind == 'u' and not isinstance(systr, unicode):
systr = systr.decode("UTF-8")
chars.append(systr)
elif sy == 'ESCAPE':
systr = s.systring
if kind == 'r':
if systr == '\\\n':
chars.append(r'\\\n')
elif systr == r'\"':
chars.append(r'\\\"')
elif systr == r'\\':
chars.append(r'\\\\')
else:
chars.append('\\' + systr)
else:
c = systr[1]
if c in "'\"\\abfnrtv01234567":
chars.append(systr)
elif c == '\n':
pass
elif c in 'ux':
if kind == 'u':
try:
chars.append(systr.decode('unicode_escape'))
except UnicodeDecodeError:
s.error("Invalid unicode escape '%s'" % systr,
pos = pos)
elif c == 'x':
chars.append('\\x0' + systr[2:])
else:
chars.append(systr)
else:
chars.append(r'\\' + systr[1:])
elif sy == 'NEWLINE':
chars.append(r'\n')
elif sy == 'END_STRING':
break
elif sy == 'EOF':
s.error("Unclosed string literal", pos = pos)
else:
s.error(
"Unexpected token %r:%r in string literal" %
(sy, s.systring))
s.next()
value = ''.join(chars)
#print "p_string_literal: value =", repr(value) ###
return kind, value
def unquote(s):
is_raw = 0
if s[:1].lower() == "r":
is_raw = 1
s = s[1:]
q = s[:3]
if q == '"""' or q == "'''":
s = s[3:-3]
else:
s = s[1:-1]
if is_raw:
s = s.replace('\\', '\\\\')
s = s.replace('\n', '\\\n')
else:
# Split into double quotes, newlines, escape sequences
# and spans of regular chars
l1 = re.split(r'((?:\\[0-7]{1,3})|(?:\\x[0-9A-Fa-f]{2})|(?:\\.)|(?:\\\n)|(?:\n)|")', s)
#print "unquote: l1 =", l1 ###
l2 = []
for item in l1:
if item == '"' or item == '\n':
l2.append('\\' + item)
elif item == '\\\n':
pass
elif item[:1] == '\\':
if len(item) == 2:
if item[1] in '"\\abfnrtv':
l2.append(item)
else:
l2.append(item[1])
elif item[1:2] == 'x':
l2.append('\\x0' + item[2:])
else:
# octal escape
l2.append(item)
else:
l2.append(item)
s = "".join(l2)
return s
# list_display ::= "[" [listmaker] "]"
# listmaker ::= expression ( list_for | ( "," expression )* [","] )
# list_iter ::= list_for | list_if
# list_for ::= "for" expression_list "in" testlist [list_iter]
# list_if ::= "if" test [list_iter]
def p_list_maker(s):
# s.sy == '['
pos = s.position()
s.next()
if s.sy == ']':
s.expect(']')
return ExprNodes.ListNode(pos, args = [])
expr = p_simple_expr(s)
if s.sy == 'for':
loop = p_list_for(s)
s.expect(']')
inner_loop = loop
while not isinstance(inner_loop.body, Nodes.PassStatNode):
inner_loop = inner_loop.body
if isinstance(inner_loop, Nodes.IfStatNode):
inner_loop = inner_loop.if_clauses[0]
append = ExprNodes.ListComprehensionAppendNode( pos, expr = expr )
inner_loop.body = Nodes.ExprStatNode(pos, expr = append)
return ExprNodes.ListComprehensionNode(pos, loop = loop, append = append)
else:
exprs = [expr]
if s.sy == ',':
s.next()
exprs += p_simple_expr_list(s)
s.expect(']')
return ExprNodes.ListNode(pos, args = exprs)
def p_list_iter(s):
if s.sy == 'for':
return p_list_for(s)
elif s.sy == 'if':
return p_list_if(s)
else:
return Nodes.PassStatNode(s.position())
def p_list_for(s):
# s.sy == 'for'
pos = s.position()
s.next()
kw = p_for_bounds(s)
kw['else_clause'] = None
kw['body'] = p_list_iter(s)
return Nodes.ForStatNode(pos, **kw)
def p_list_if(s):
# s.sy == 'if'
pos = s.position()
s.next()
test = p_test(s)
return Nodes.IfStatNode(pos,
if_clauses = [Nodes.IfClauseNode(pos, condition = test, body = p_list_iter(s))],
else_clause = None )
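# For illustration (a sketch of how the pieces above combine, names made
# up): [f(x) for x in seq if cond] is parsed by p_list_maker into a
# ListComprehensionNode whose loop is a ForStatNode over seq; the 'if'
# clause becomes a nested IfStatNode, and the innermost PassStatNode body
# is replaced by a ListComprehensionAppendNode that appends f(x).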
#dictmaker: test ':' test (',' test ':' test)* [',']
def p_dict_maker(s):
# s.sy == '{'
pos = s.position()
s.next()
items = []
while s.sy != '}':
items.append(p_dict_item(s))
if s.sy != ',':
break
s.next()
s.expect('}')
return ExprNodes.DictNode(pos, key_value_pairs = items)
def p_dict_item(s):
key = p_simple_expr(s)
s.expect(':')
value = p_simple_expr(s)
return ExprNodes.DictItemNode(key.pos, key=key, value=value)
def p_backquote_expr(s):
# s.sy == '`'
pos = s.position()
s.next()
arg = p_expr(s)
s.expect('`')
return ExprNodes.BackquoteNode(pos, arg = arg)
def p_simple_expr_list(s):
exprs = []
while s.sy not in expr_terminators:
exprs.append(p_simple_expr(s))
if s.sy != ',':
break
s.next()
return exprs
def p_expr(s):
pos = s.position()
expr = p_simple_expr(s)
if s.sy == ',':
s.next()
exprs = [expr] + p_simple_expr_list(s)
return ExprNodes.TupleNode(pos, args = exprs)
else:
return expr
#testlist: test (',' test)* [',']
# differs from p_expr only in the fact that it cannot contain conditional expressions
def p_testlist(s):
pos = s.position()
expr = p_test(s)
if s.sy == ',':
exprs = [expr]
while s.sy == ',':
s.next()
exprs.append(p_test(s))
return ExprNodes.TupleNode(pos, args = exprs)
else:
return expr
expr_terminators = (')', ']', '}', ':', '=', 'NEWLINE')
#-------------------------------------------------------
#
# Statements
#
#-------------------------------------------------------
def p_global_statement(s):
# assume s.sy == 'global'
pos = s.position()
s.next()
names = p_ident_list(s)
return Nodes.GlobalNode(pos, names = names)
def p_expression_or_assignment(s):
expr_list = [p_expr(s)]
while s.sy == '=':
s.next()
expr_list.append(p_expr(s))
if len(expr_list) == 1:
        if re.match(r"[+*/%^&|-]=", s.sy):
lhs = expr_list[0]
if not isinstance(lhs, (ExprNodes.AttributeNode, ExprNodes.IndexNode, ExprNodes.NameNode) ):
error(lhs.pos, "Illegal operand for inplace operation.")
operator = s.sy[0]
s.next()
rhs = p_expr(s)
return Nodes.InPlaceAssignmentNode(lhs.pos, operator = operator, lhs = lhs, rhs = rhs)
expr = expr_list[0]
if isinstance(expr, ExprNodes.StringNode):
return Nodes.PassStatNode(expr.pos)
else:
return Nodes.ExprStatNode(expr.pos, expr = expr)
else:
expr_list_list = []
flatten_parallel_assignments(expr_list, expr_list_list)
nodes = []
for expr_list in expr_list_list:
lhs_list = expr_list[:-1]
rhs = expr_list[-1]
if len(lhs_list) == 1:
node = Nodes.SingleAssignmentNode(rhs.pos,
lhs = lhs_list[0], rhs = rhs)
else:
node = Nodes.CascadedAssignmentNode(rhs.pos,
lhs_list = lhs_list, rhs = rhs)
nodes.append(node)
if len(nodes) == 1:
return nodes[0]
else:
return Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes)
def flatten_parallel_assignments(input, output):
# The input is a list of expression nodes, representing
# the LHSs and RHS of one (possibly cascaded) assignment
# statement. If they are all sequence constructors with
# the same number of arguments, rearranges them into a
# list of equivalent assignments between the individual
# elements. This transformation is applied recursively.
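    # For example (illustrative): for "a, b = x, y" the input is
    # [TupleNode(a, b), TupleNode(x, y)] and the output receives
    # [[a, x], [b, y]], i.e. one flattened assignment per element.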
size = find_parallel_assignment_size(input)
if size >= 0:
for i in range(size):
new_exprs = [expr.args[i] for expr in input]
flatten_parallel_assignments(new_exprs, output)
else:
output.append(input)
def find_parallel_assignment_size(input):
# The input is a list of expression nodes. If
# they are all sequence constructors with the same number
# of arguments, return that number, else return -1.
# Produces an error message if they are all sequence
# constructors but not all the same size.
for expr in input:
if not expr.is_sequence_constructor:
return -1
rhs = input[-1]
rhs_size = len(rhs.args)
for lhs in input[:-1]:
lhs_size = len(lhs.args)
if lhs_size != rhs_size:
error(lhs.pos, "Unpacking sequence of wrong size (expected %d, got %d)"
% (lhs_size, rhs_size))
return -1
return rhs_size
def p_print_statement(s):
# s.sy == 'print'
pos = s.position()
s.next()
if s.sy == '>>':
s.error("'print >>' not yet implemented")
args = []
ewc = 0
if s.sy not in ('NEWLINE', 'EOF'):
args.append(p_simple_expr(s))
while s.sy == ',':
s.next()
if s.sy in ('NEWLINE', 'EOF'):
ewc = 1
break
args.append(p_simple_expr(s))
return Nodes.PrintStatNode(pos,
args = args, ends_with_comma = ewc)
def p_del_statement(s):
# s.sy == 'del'
pos = s.position()
s.next()
args = p_simple_expr_list(s)
return Nodes.DelStatNode(pos, args = args)
def p_pass_statement(s, with_newline = 0):
pos = s.position()
s.expect('pass')
if with_newline:
s.expect_newline("Expected a newline")
return Nodes.PassStatNode(pos)
def p_break_statement(s):
# s.sy == 'break'
pos = s.position()
s.next()
return Nodes.BreakStatNode(pos)
def p_continue_statement(s):
# s.sy == 'continue'
pos = s.position()
s.next()
return Nodes.ContinueStatNode(pos)
def p_return_statement(s):
# s.sy == 'return'
pos = s.position()
s.next()
if s.sy not in statement_terminators:
value = p_expr(s)
else:
value = None
return Nodes.ReturnStatNode(pos, value = value)
def p_raise_statement(s):
# s.sy == 'raise'
pos = s.position()
s.next()
exc_type = None
exc_value = None
exc_tb = None
if s.sy not in statement_terminators:
exc_type = p_simple_expr(s)
if s.sy == ',':
s.next()
exc_value = p_simple_expr(s)
if s.sy == ',':
s.next()
exc_tb = p_simple_expr(s)
if exc_type or exc_value or exc_tb:
return Nodes.RaiseStatNode(pos,
exc_type = exc_type,
exc_value = exc_value,
exc_tb = exc_tb)
else:
return Nodes.ReraiseStatNode(pos)
def p_import_statement(s):
# s.sy in ('import', 'cimport')
pos = s.position()
kind = s.sy
s.next()
items = [p_dotted_name(s, as_allowed = 1)]
while s.sy == ',':
s.next()
items.append(p_dotted_name(s, as_allowed = 1))
stats = []
for pos, target_name, dotted_name, as_name in items:
if kind == 'cimport':
stat = Nodes.CImportStatNode(pos,
module_name = dotted_name,
as_name = as_name)
else:
if as_name and "." in dotted_name:
name_list = ExprNodes.ListNode(pos, args = [
ExprNodes.StringNode(pos, value = "*")])
else:
name_list = None
stat = Nodes.SingleAssignmentNode(pos,
lhs = ExprNodes.NameNode(pos,
name = as_name or target_name),
rhs = ExprNodes.ImportNode(pos,
module_name = ExprNodes.StringNode(pos,
value = dotted_name),
name_list = name_list))
stats.append(stat)
return Nodes.StatListNode(pos, stats = stats)
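# For illustration (assumption about the generated tree, names made up):
# "import spam" becomes an assignment of ImportNode("spam") to the name
# spam, while "import spam.ham as x" additionally passes a ["*"] name
# list so that the submodule itself, not the top-level package, gets
# bound to x.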
def p_from_import_statement(s):
# s.sy == 'from'
pos = s.position()
s.next()
(dotted_name_pos, _, dotted_name, _) = \
p_dotted_name(s, as_allowed = 0)
if s.sy in ('import', 'cimport'):
kind = s.sy
s.next()
else:
s.error("Expected 'import' or 'cimport'")
if s.sy == '*':
s.error("'import *' not supported")
imported_names = [p_imported_name(s)]
while s.sy == ',':
s.next()
imported_names.append(p_imported_name(s))
if kind == 'cimport':
for (name_pos, name, as_name) in imported_names:
local_name = as_name or name
s.add_type_name(local_name)
return Nodes.FromCImportStatNode(pos,
module_name = dotted_name,
imported_names = imported_names)
else:
imported_name_strings = []
items = []
for (name_pos, name, as_name) in imported_names:
imported_name_strings.append(
ExprNodes.StringNode(name_pos, value = name))
items.append(
(name,
ExprNodes.NameNode(name_pos,
name = as_name or name)))
import_list = ExprNodes.ListNode(
imported_names[0][0], args = imported_name_strings)
return Nodes.FromImportStatNode(pos,
module = ExprNodes.ImportNode(dotted_name_pos,
module_name = ExprNodes.StringNode(dotted_name_pos,
value = dotted_name),
name_list = import_list),
items = items)
def p_imported_name(s):
pos = s.position()
name = p_ident(s)
as_name = p_as_name(s)
return (pos, name, as_name)
def p_dotted_name(s, as_allowed):
pos = s.position()
target_name = p_ident(s)
as_name = None
names = [target_name]
while s.sy == '.':
s.next()
names.append(p_ident(s))
if as_allowed:
as_name = p_as_name(s)
return (pos, target_name, join(names, "."), as_name)
def p_as_name(s):
if s.sy == 'IDENT' and s.systring == 'as':
s.next()
return p_ident(s)
else:
return None
def p_assert_statement(s):
# s.sy == 'assert'
pos = s.position()
s.next()
cond = p_simple_expr(s)
if s.sy == ',':
s.next()
value = p_simple_expr(s)
else:
value = None
return Nodes.AssertStatNode(pos, cond = cond, value = value)
statement_terminators = (';', 'NEWLINE', 'EOF')
def p_if_statement(s):
# s.sy == 'if'
pos = s.position()
s.next()
if_clauses = [p_if_clause(s)]
while s.sy == 'elif':
s.next()
if_clauses.append(p_if_clause(s))
else_clause = p_else_clause(s)
return Nodes.IfStatNode(pos,
if_clauses = if_clauses, else_clause = else_clause)
def p_if_clause(s):
pos = s.position()
test = p_simple_expr(s)
body = p_suite(s)
return Nodes.IfClauseNode(pos,
condition = test, body = body)
def p_else_clause(s):
if s.sy == 'else':
s.next()
return p_suite(s)
else:
return None
def p_while_statement(s):
# s.sy == 'while'
pos = s.position()
s.next()
test = p_simple_expr(s)
body = p_suite(s)
else_clause = p_else_clause(s)
return Nodes.WhileStatNode(pos,
condition = test, body = body,
else_clause = else_clause)
def p_for_statement(s):
# s.sy == 'for'
pos = s.position()
s.next()
kw = p_for_bounds(s)
kw['body'] = p_suite(s)
kw['else_clause'] = p_else_clause(s)
return Nodes.ForStatNode(pos, **kw)
def p_for_bounds(s):
target = p_for_target(s)
if s.sy == 'in':
s.next()
iterator = p_for_iterator(s)
return { 'target': target, 'iterator': iterator }
elif s.sy == 'from':
s.next()
bound1 = p_bit_expr(s)
rel1 = p_for_from_relation(s)
name2_pos = s.position()
name2 = p_ident(s)
rel2_pos = s.position()
rel2 = p_for_from_relation(s)
bound2 = p_bit_expr(s)
step = p_for_from_step(s)
if not target.is_name:
error(target.pos,
"Target of for-from statement must be a variable name")
elif name2 != target.name:
error(name2_pos,
"Variable name in for-from range does not match target")
if rel1[0] != rel2[0]:
error(rel2_pos,
"Relation directions in for-from do not match")
return {'target': target,
'bound1': bound1,
'relation1': rel1,
'relation2': rel2,
'bound2': bound2,
'step': step }
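# For illustration (Pyrex for-from syntax as handled above, names made
# up): "for i from 0 <= i < n by 2" yields target i, bound1 0, relation1
# '<=', relation2 '<', bound2 n and step 2; both relations must point in
# the same direction and the loop variable must match the target.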
def p_for_from_relation(s):
if s.sy in inequality_relations:
op = s.sy
s.next()
return op
else:
s.error("Expected one of '<', '<=', '>' '>='")
def p_for_from_step(s):
if s.sy == 'by':
s.next()
step = p_bit_expr(s)
return step
else:
return None
inequality_relations = ('<', '<=', '>', '>=')
def p_for_target(s):
pos = s.position()
expr = p_bit_expr(s)
if s.sy == ',':
s.next()
exprs = [expr]
while s.sy != 'in':
exprs.append(p_bit_expr(s))
if s.sy != ',':
break
s.next()
return ExprNodes.TupleNode(pos, args = exprs)
else:
return expr
def p_for_iterator(s):
pos = s.position()
expr = p_testlist(s)
return ExprNodes.IteratorNode(pos, sequence = expr)
def p_try_statement(s):
# s.sy == 'try'
pos = s.position()
s.next()
body = p_suite(s)
except_clauses = []
else_clause = None
if s.sy in ('except', 'else'):
while s.sy == 'except':
except_clauses.append(p_except_clause(s))
if s.sy == 'else':
s.next()
else_clause = p_suite(s)
body = Nodes.TryExceptStatNode(pos,
body = body, except_clauses = except_clauses,
else_clause = else_clause)
if s.sy != 'finally':
return body
# try-except-finally is equivalent to nested try-except/try-finally
if s.sy == 'finally':
s.next()
finally_clause = p_suite(s)
return Nodes.TryFinallyStatNode(pos,
body = body, finally_clause = finally_clause)
else:
s.error("Expected 'except' or 'finally'")
def p_except_clause(s):
# s.sy == 'except'
pos = s.position()
s.next()
exc_type = None
exc_value = None
if s.sy != ':':
exc_type = p_simple_expr(s)
if s.sy == ',':
s.next()
exc_value = p_simple_expr(s)
body = p_suite(s)
return Nodes.ExceptClauseNode(pos,
pattern = exc_type, target = exc_value, body = body)
def p_include_statement(s, level):
pos = s.position()
s.next() # 'include'
_, include_file_name = p_string_literal(s)
s.expect_newline("Syntax error in include statement")
if s.compile_time_eval:
include_file_path = s.context.find_include_file(include_file_name, pos)
if include_file_path:
f = open(include_file_path, "rU")
s2 = PyrexScanner(f, include_file_path, s)
try:
tree = p_statement_list(s2, level)
finally:
f.close()
return tree
else:
return None
else:
return Nodes.PassStatNode(pos)
def p_with_statement(s):
pos = s.position()
s.next() # 'with'
# if s.sy == 'IDENT' and s.systring in ('gil', 'nogil'):
if s.sy == 'IDENT' and s.systring == 'nogil':
state = s.systring
s.next()
body = p_suite(s)
return Nodes.GILStatNode(pos, state = state, body = body)
else:
s.error("Only 'with gil' and 'with nogil' implemented",
pos = pos)
def p_simple_statement(s):
#print "p_simple_statement:", s.sy, s.systring ###
if s.sy == 'global':
node = p_global_statement(s)
elif s.sy == 'print':
node = p_print_statement(s)
elif s.sy == 'del':
node = p_del_statement(s)
elif s.sy == 'break':
node = p_break_statement(s)
elif s.sy == 'continue':
node = p_continue_statement(s)
elif s.sy == 'return':
node = p_return_statement(s)
elif s.sy == 'raise':
node = p_raise_statement(s)
elif s.sy in ('import', 'cimport'):
node = p_import_statement(s)
elif s.sy == 'from':
node = p_from_import_statement(s)
elif s.sy == 'assert':
node = p_assert_statement(s)
elif s.sy == 'pass':
node = p_pass_statement(s)
else:
node = p_expression_or_assignment(s)
return node
def p_simple_statement_list(s):
# Parse a series of simple statements on one line
# separated by semicolons.
stat = p_simple_statement(s)
if s.sy == ';':
stats = [stat]
while s.sy == ';':
#print "p_simple_statement_list: maybe more to follow" ###
s.next()
if s.sy in ('NEWLINE', 'EOF'):
break
stats.append(p_simple_statement(s))
stat = Nodes.StatListNode(stats[0].pos, stats = stats)
s.expect_newline("Syntax error in simple statement list")
return stat
def p_compile_time_expr(s):
old = s.compile_time_expr
s.compile_time_expr = 1
expr = p_expr(s)
s.compile_time_expr = old
return expr
def p_DEF_statement(s):
pos = s.position()
denv = s.compile_time_env
s.next() # 'DEF'
name = p_ident(s)
s.expect('=')
expr = p_compile_time_expr(s)
value = expr.compile_time_value(denv)
#print "p_DEF_statement: %s = %r" % (name, value) ###
denv.declare(name, value)
s.expect_newline()
return Nodes.PassStatNode(pos)
def p_IF_statement(s, level, cdef_flag, visibility, api):
    pos = s.position()
saved_eval = s.compile_time_eval
current_eval = saved_eval
denv = s.compile_time_env
result = None
while 1:
s.next() # 'IF' or 'ELIF'
expr = p_compile_time_expr(s)
s.compile_time_eval = current_eval and bool(expr.compile_time_value(denv))
body = p_suite(s, level, cdef_flag, visibility, api = api)
if s.compile_time_eval:
result = body
current_eval = 0
if s.sy != 'ELIF':
break
if s.sy == 'ELSE':
s.next()
s.compile_time_eval = current_eval
body = p_suite(s, level, cdef_flag, visibility, api = api)
if current_eval:
result = body
if not result:
result = Nodes.PassStatNode(pos)
s.compile_time_eval = saved_eval
return result
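# For illustration (assumption; the constant name is made up): given
#   IF SOME_FLAG:
#       ...
#   ELSE:
#       ...
# only the body of the first branch whose compile-time condition is true
# becomes the parsed result; if no branch matches, the whole statement
# reduces to a PassStatNode.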
def p_statement(s, level, cdef_flag = 0, visibility = 'private', api = 0):
if s.sy == 'ctypedef':
if level not in ('module', 'module_pxd'):
s.error("ctypedef statement not allowed here")
if api:
error(s.position(), "'api' not allowed with 'ctypedef'")
return p_ctypedef_statement(s, level, visibility, api)
elif s.sy == 'DEF':
return p_DEF_statement(s)
elif s.sy == 'IF':
return p_IF_statement(s, level, cdef_flag, visibility, api)
else:
overridable = 0
if s.sy == 'cdef':
cdef_flag = 1
s.next()
if s.sy == 'cpdef':
cdef_flag = 1
overridable = 1
s.next()
if cdef_flag:
if level not in ('module', 'module_pxd', 'function', 'c_class', 'c_class_pxd'):
s.error('cdef statement not allowed here')
s.level = level
return p_cdef_statement(s, level, visibility = visibility,
api = api, overridable = overridable)
# elif s.sy == 'cpdef':
# s.next()
# return p_c_func_or_var_declaration(s, level, s.position(), visibility = visibility, api = api, overridable = True)
else:
if api:
                error(s.position(), "'api' not allowed with this statement")
elif s.sy == 'def':
if level not in ('module', 'class', 'c_class', 'property'):
s.error('def statement not allowed here')
s.level = level
return p_def_statement(s)
elif s.sy == 'class':
if level != 'module':
s.error("class definition not allowed here")
return p_class_statement(s)
elif s.sy == 'include':
if level not in ('module', 'module_pxd'):
s.error("include statement not allowed here")
return p_include_statement(s, level)
elif level == 'c_class' and s.sy == 'IDENT' and s.systring == 'property':
return p_property_decl(s)
elif s.sy == 'pass' and level != 'property':
return p_pass_statement(s, with_newline = 1)
else:
if level in ('c_class_pxd', 'property'):
s.error("Executable statement not allowed here")
if s.sy == 'if':
return p_if_statement(s)
elif s.sy == 'while':
return p_while_statement(s)
elif s.sy == 'for':
return p_for_statement(s)
elif s.sy == 'try':
return p_try_statement(s)
elif s.sy == 'with':
return p_with_statement(s)
else:
return p_simple_statement_list(s)
def p_statement_list(s, level,
cdef_flag = 0, visibility = 'private', api = 0):
# Parse a series of statements separated by newlines.
pos = s.position()
stats = []
while s.sy not in ('DEDENT', 'EOF'):
stats.append(p_statement(s, level,
cdef_flag = cdef_flag, visibility = visibility, api = api))
if len(stats) == 1:
return stats[0]
else:
return Nodes.StatListNode(pos, stats = stats)
def p_suite(s, level = 'other', cdef_flag = 0,
visibility = 'private', with_doc = 0, with_pseudo_doc = 0, api = 0):
pos = s.position()
s.expect(':')
doc = None
stmts = []
if s.sy == 'NEWLINE':
s.next()
s.expect_indent()
if with_doc or with_pseudo_doc:
doc = p_doc_string(s)
body = p_statement_list(s,
level = level,
cdef_flag = cdef_flag,
visibility = visibility,
api = api)
s.expect_dedent()
else:
if api:
            error(s.position(), "'api' not allowed with this statement")
if level in ('module', 'class', 'function', 'other'):
body = p_simple_statement_list(s)
else:
body = p_pass_statement(s)
s.expect_newline("Syntax error in declarations")
if with_doc:
return doc, body
else:
return body
def p_c_base_type(s, self_flag = 0):
# If self_flag is true, this is the base type for the
# self argument of a C method of an extension type.
if s.sy == '(':
return p_c_complex_base_type(s)
else:
return p_c_simple_base_type(s, self_flag)
def p_calling_convention(s):
if s.sy == 'IDENT' and s.systring in calling_convention_words:
result = s.systring
s.next()
return result
else:
return ""
calling_convention_words = ("__stdcall", "__cdecl")
def p_c_complex_base_type(s):
# s.sy == '('
pos = s.position()
s.next()
base_type = p_c_base_type(s)
declarator = p_c_declarator(s, empty = 1)
s.expect(')')
return Nodes.CComplexBaseTypeNode(pos,
base_type = base_type, declarator = declarator)
def p_c_simple_base_type(s, self_flag):
#print "p_c_simple_base_type: self_flag =", self_flag
is_basic = 0
signed = 1
longness = 0
module_path = []
pos = s.position()
if looking_at_base_type(s):
#print "p_c_simple_base_type: looking_at_base_type at", s.position()
is_basic = 1
signed, longness = p_sign_and_longness(s)
if s.sy == 'IDENT' and s.systring in basic_c_type_names:
name = s.systring
s.next()
else:
name = 'int'
elif s.looking_at_type_name() or looking_at_dotted_name(s):
#print "p_c_simple_base_type: looking_at_type_name at", s.position()
name = s.systring
s.next()
while s.sy == '.':
module_path.append(name)
s.next()
name = p_ident(s)
else:
#print "p_c_simple_base_type: not looking at type at", s.position()
name = None
return Nodes.CSimpleBaseTypeNode(pos,
name = name, module_path = module_path,
is_basic_c_type = is_basic, signed = signed,
longness = longness, is_self_arg = self_flag)
def looking_at_type(s):
return looking_at_base_type(s) or s.looking_at_type_name()
def looking_at_base_type(s):
#print "looking_at_base_type?", s.sy, s.systring, s.position()
return s.sy == 'IDENT' and s.systring in base_type_start_words
def looking_at_dotted_name(s):
if s.sy == 'IDENT':
name = s.systring
s.next()
result = s.sy == '.'
s.put_back('IDENT', name)
return result
else:
return 0
basic_c_type_names = ("void", "char", "int", "float", "double", "Py_ssize_t", "bint")
sign_and_longness_words = ("short", "long", "signed", "unsigned")
base_type_start_words = \
basic_c_type_names + sign_and_longness_words
def p_sign_and_longness(s):
signed = 1
longness = 0
while s.sy == 'IDENT' and s.systring in sign_and_longness_words:
if s.systring == 'unsigned':
signed = 0
elif s.systring == 'signed':
signed = 2
elif s.systring == 'short':
longness = -1
elif s.systring == 'long':
longness += 1
s.next()
return signed, longness
def p_opt_cname(s):
literal = p_opt_string_literal(s)
if literal:
_, cname = literal
else:
cname = None
return cname
def p_c_declarator(s, empty = 0, is_type = 0, cmethod_flag = 0, assignable = 0,
nonempty = 0, calling_convention_allowed = 0):
# If empty is true, the declarator must be empty. If nonempty is true,
# the declarator must be nonempty. Otherwise we don't care.
# If cmethod_flag is true, then if this declarator declares
# a function, it's a C method of an extension type.
pos = s.position()
if s.sy == '(':
s.next()
if s.sy == ')' or looking_at_type(s):
base = Nodes.CNameDeclaratorNode(pos, name = "", cname = None)
result = p_c_func_declarator(s, pos, base, cmethod_flag)
else:
result = p_c_declarator(s, empty, is_type, cmethod_flag, nonempty = nonempty,
calling_convention_allowed = 1)
s.expect(')')
else:
result = p_c_simple_declarator(s, empty, is_type, cmethod_flag, assignable, nonempty)
if not calling_convention_allowed and result.calling_convention and s.sy != '(':
error(s.position(), "%s on something that is not a function"
% result.calling_convention)
while s.sy in ('[', '('):
pos = s.position()
if s.sy == '[':
result = p_c_array_declarator(s, result)
else: # sy == '('
s.next()
result = p_c_func_declarator(s, pos, result, cmethod_flag)
cmethod_flag = 0
return result
def p_c_array_declarator(s, base):
pos = s.position()
s.next() # '['
if s.sy != ']':
dim = p_expr(s)
else:
dim = None
s.expect(']')
return Nodes.CArrayDeclaratorNode(pos, base = base, dimension = dim)
def p_c_func_declarator(s, pos, base, cmethod_flag):
# Opening paren has already been skipped
args = p_c_arg_list(s, in_pyfunc = 0, cmethod_flag = cmethod_flag,
nonempty_declarators = 0)
ellipsis = p_optional_ellipsis(s)
s.expect(')')
nogil = p_nogil(s)
exc_val, exc_check = p_exception_value_clause(s)
with_gil = p_with_gil(s)
return Nodes.CFuncDeclaratorNode(pos,
base = base, args = args, has_varargs = ellipsis,
exception_value = exc_val, exception_check = exc_check,
nogil = nogil or with_gil, with_gil = with_gil)
def p_c_simple_declarator(s, empty, is_type, cmethod_flag, assignable, nonempty):
pos = s.position()
calling_convention = p_calling_convention(s)
if s.sy == '*':
s.next()
base = p_c_declarator(s, empty, is_type, cmethod_flag, assignable, nonempty)
result = Nodes.CPtrDeclaratorNode(pos,
base = base)
elif s.sy == '**': # scanner returns this as a single token
s.next()
base = p_c_declarator(s, empty, is_type, cmethod_flag, assignable, nonempty)
result = Nodes.CPtrDeclaratorNode(pos,
base = Nodes.CPtrDeclaratorNode(pos,
base = base))
else:
rhs = None
if s.sy == 'IDENT':
name = s.systring
if is_type:
s.add_type_name(name)
if empty:
error(s.position(), "Declarator should be empty")
s.next()
cname = p_opt_cname(s)
if s.sy == '=' and assignable:
s.next()
rhs = p_simple_expr(s)
else:
if nonempty:
error(s.position(), "Empty declarator")
name = ""
cname = None
result = Nodes.CNameDeclaratorNode(pos,
name = name, cname = cname, rhs = rhs)
result.calling_convention = calling_convention
return result
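# For illustration (assumption from the declarator code above): the
# declarator "*p" in "cdef int *p" becomes a CPtrDeclaratorNode wrapping
# a CNameDeclaratorNode, and the single '**' token, e.g. in
# "cdef char **argv", produces two nested CPtrDeclaratorNodes around the
# name declarator.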
def p_nogil(s):
if s.sy == 'IDENT' and s.systring == 'nogil':
s.next()
return 1
else:
return 0
def p_with_gil(s):
if s.sy == 'with':
s.next()
s.expect_keyword('gil')
return 1
else:
return 0
def p_exception_value_clause(s):
exc_val = None
exc_check = 0
if s.sy == 'except':
s.next()
if s.sy == '*':
exc_check = 1
s.next()
elif s.sy == '+':
exc_check = '+'
s.next()
if s.sy == 'IDENT':
name = s.systring
s.next()
exc_val = p_name(s, name)
else:
if s.sy == '?':
exc_check = 1
s.next()
exc_val = p_simple_expr(s)
return exc_val, exc_check
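# For illustration (assumption from the clause parser above): "except -1"
# gives exc_val -1 with exc_check 0, "except ? -1" additionally sets
# exc_check to 1, "except *" gives exc_check 1 with no value, and
# "except +" gives exc_check '+' (optionally followed by an identifier
# that becomes exc_val).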
c_arg_list_terminators = ('*', '**', '.', ')')
#def p_c_arg_list(s, in_pyfunc, cmethod_flag = 0, nonempty_declarators = 0,
# kw_only = 0):
# args = []
# if s.sy not in c_arg_list_terminators:
# args.append(p_c_arg_decl(s, in_pyfunc, cmethod_flag,
# nonempty = nonempty_declarators, kw_only = kw_only))
# while s.sy == ',':
# s.next()
# if s.sy in c_arg_list_terminators:
# break
# args.append(p_c_arg_decl(s, in_pyfunc), nonempty = nonempty_declarators,
# kw_only = kw_only)
# return args
def p_c_arg_list(s, in_pyfunc, cmethod_flag = 0, nonempty_declarators = 0,
kw_only = 0):
# Comma-separated list of C argument declarations, possibly empty.
# May have a trailing comma.
args = []
is_self_arg = cmethod_flag
while s.sy not in c_arg_list_terminators:
args.append(p_c_arg_decl(s, in_pyfunc, is_self_arg,
nonempty = nonempty_declarators, kw_only = kw_only))
if s.sy != ',':
break
s.next()
is_self_arg = 0
return args
def p_optional_ellipsis(s):
if s.sy == '.':
expect_ellipsis(s)
return 1
else:
return 0
def p_c_arg_decl(s, in_pyfunc, cmethod_flag = 0, nonempty = 0, kw_only = 0):
pos = s.position()
not_none = 0
default = None
base_type = p_c_base_type(s, cmethod_flag)
declarator = p_c_declarator(s, nonempty = nonempty)
if s.sy == 'not':
s.next()
if s.sy == 'IDENT' and s.systring == 'None':
s.next()
else:
s.error("Expected 'None'")
if not in_pyfunc:
error(pos, "'not None' only allowed in Python functions")
not_none = 1
if s.sy == '=':
s.next()
if 'pxd' in s.level:
if s.sy not in ['*', '?']:
error(pos, "default values cannot be specified in pxd files, use ? or *")
default = 1
s.next()
else:
default = p_simple_expr(s)
return Nodes.CArgDeclNode(pos,
base_type = base_type,
declarator = declarator,
not_none = not_none,
default = default,
kw_only = kw_only)
def p_api(s):
if s.sy == 'IDENT' and s.systring == 'api':
s.next()
return 1
else:
return 0
def p_cdef_statement(s, level, visibility = 'private', api = 0,
overridable = False):
pos = s.position()
visibility = p_visibility(s, visibility)
api = api or p_api(s)
if api:
if visibility not in ('private', 'public'):
error(pos, "Cannot combine 'api' with '%s'" % visibility)
if (visibility == 'extern') and s.sy == 'from':
return p_cdef_extern_block(s, level, pos)
elif s.sy == 'import':
s.next()
return p_cdef_extern_block(s, level, pos)
elif s.sy == ':':
return p_cdef_block(s, level, visibility, api)
elif s.sy == 'class':
if level not in ('module', 'module_pxd'):
error(pos, "Extension type definition not allowed here")
#if api:
# error(pos, "'api' not allowed with extension class")
return p_c_class_definition(s, level, pos, visibility = visibility, api = api)
elif s.sy == 'IDENT' and s.systring in struct_union_or_enum:
if level not in ('module', 'module_pxd'):
error(pos, "C struct/union/enum definition not allowed here")
#if visibility == 'public':
# error(pos, "Public struct/union/enum definition not implemented")
#if api:
# error(pos, "'api' not allowed with '%s'" % s.systring)
if s.systring == "enum":
return p_c_enum_definition(s, pos, level, visibility)
else:
return p_c_struct_or_union_definition(s, pos, level, visibility)
elif s.sy == 'pass':
node = p_pass_statement(s)
s.expect_newline('Expected a newline')
return node
else:
return p_c_func_or_var_declaration(s, level, pos, visibility, api,
overridable)
def p_cdef_block(s, level, visibility, api):
return p_suite(s, level, cdef_flag = 1, visibility = visibility, api = api)
def p_cdef_extern_block(s, level, pos):
include_file = None
s.expect('from')
if s.sy == '*':
s.next()
else:
_, include_file = p_string_literal(s)
body = p_suite(s, level, cdef_flag = 1, visibility = 'extern')
return Nodes.CDefExternNode(pos,
include_file = include_file,
body = body)
struct_union_or_enum = (
"struct", "union", "enum"
)
def p_c_enum_definition(s, pos, level, visibility, typedef_flag = 0):
# s.sy == ident 'enum'
s.next()
if s.sy == 'IDENT':
name = s.systring
s.next()
s.add_type_name(name)
cname = p_opt_cname(s)
else:
name = None
cname = None
items = None
s.expect(':')
items = []
if s.sy != 'NEWLINE':
p_c_enum_line(s, items)
else:
s.next() # 'NEWLINE'
s.expect_indent()
while s.sy not in ('DEDENT', 'EOF'):
p_c_enum_line(s, items)
s.expect_dedent()
return Nodes.CEnumDefNode(pos, name = name, cname = cname,
items = items, typedef_flag = typedef_flag, visibility = visibility,
in_pxd = level == 'module_pxd')
def p_c_enum_line(s, items):
if s.sy != 'pass':
p_c_enum_item(s, items)
while s.sy == ',':
s.next()
if s.sy in ('NEWLINE', 'EOF'):
break
p_c_enum_item(s, items)
else:
s.next()
s.expect_newline("Syntax error in enum item list")
def p_c_enum_item(s, items):
pos = s.position()
name = p_ident(s)
cname = p_opt_cname(s)
value = None
if s.sy == '=':
s.next()
value = p_simple_expr(s)
items.append(Nodes.CEnumDefItemNode(pos,
name = name, cname = cname, value = value))
def p_c_struct_or_union_definition(s, pos, level, visibility, typedef_flag = 0):
# s.sy == ident 'struct' or 'union'
kind = s.systring
s.next()
name = p_ident(s)
cname = p_opt_cname(s)
s.add_type_name(name)
attributes = None
if s.sy == ':':
s.next()
s.expect('NEWLINE')
s.expect_indent()
attributes = []
while s.sy != 'DEDENT':
if s.sy != 'pass':
attributes.append(
p_c_func_or_var_declaration(s, level = 'other', pos = s.position()))
else:
s.next()
s.expect_newline("Expected a newline")
s.expect_dedent()
else:
s.expect_newline("Syntax error in struct or union definition")
return Nodes.CStructOrUnionDefNode(pos,
name = name, cname = cname, kind = kind, attributes = attributes,
typedef_flag = typedef_flag, visibility = visibility,
in_pxd = level == 'module_pxd')
def p_visibility(s, prev_visibility):
pos = s.position()
visibility = prev_visibility
if s.sy == 'IDENT' and s.systring in ('extern', 'public', 'readonly'):
visibility = s.systring
if prev_visibility != 'private' and visibility != prev_visibility:
s.error("Conflicting visibility options '%s' and '%s'"
% (prev_visibility, visibility))
s.next()
return visibility
def p_c_modifiers(s):
if s.sy == 'IDENT' and s.systring in ('inline',):
modifier = s.systring
s.next()
return [modifier] + p_c_modifiers(s)
return []
def p_c_func_or_var_declaration(s, level, pos, visibility = 'private', api = 0,
overridable = False):
cmethod_flag = level in ('c_class', 'c_class_pxd')
modifiers = p_c_modifiers(s)
base_type = p_c_base_type(s)
declarator = p_c_declarator(s, cmethod_flag = cmethod_flag, assignable = 1, nonempty = 1)
declarator.overridable = overridable
if s.sy == ':':
if level not in ('module', 'c_class'):
s.error("C function definition not allowed here")
doc, suite = p_suite(s, 'function', with_doc = 1)
result = Nodes.CFuncDefNode(pos,
visibility = visibility,
base_type = base_type,
declarator = declarator,
body = suite,
doc = doc,
modifiers = modifiers,
api = api,
overridable = overridable)
else:
#if api:
# error(s.pos, "'api' not allowed with variable declaration")
declarators = [declarator]
while s.sy == ',':
s.next()
if s.sy == 'NEWLINE':
break
declarator = p_c_declarator(s, cmethod_flag = cmethod_flag, assignable = 1, nonempty = 1)
declarators.append(declarator)
s.expect_newline("Syntax error in C variable declaration")
result = Nodes.CVarDefNode(pos,
visibility = visibility,
base_type = base_type,
declarators = declarators,
in_pxd = level == 'module_pxd',
api = api,
overridable = overridable)
return result
def p_ctypedef_statement(s, level, visibility = 'private', api = 0):
# s.sy == 'ctypedef'
pos = s.position()
s.next()
visibility = p_visibility(s, visibility)
if s.sy == 'class':
return p_c_class_definition(s, level, pos,
visibility = visibility, typedef_flag = 1, api = api)
elif s.sy == 'IDENT' and s.systring in ('struct', 'union', 'enum'):
if s.systring == 'enum':
return p_c_enum_definition(s, pos, level, visibility, typedef_flag = 1)
else:
return p_c_struct_or_union_definition(s, pos, level, visibility,
typedef_flag = 1)
else:
base_type = p_c_base_type(s)
declarator = p_c_declarator(s, is_type = 1, nonempty = 1)
s.expect_newline("Syntax error in ctypedef statement")
return Nodes.CTypeDefNode(pos,
base_type = base_type, declarator = declarator, visibility = visibility,
in_pxd = level == 'module_pxd')
def p_def_statement(s):
# s.sy == 'def'
pos = s.position()
s.next()
name = p_ident(s)
#args = []
    s.expect('(')
args = p_c_arg_list(s, in_pyfunc = 1, nonempty_declarators = 1)
star_arg = None
starstar_arg = None
if s.sy == '*':
s.next()
if s.sy == 'IDENT':
star_arg = p_py_arg_decl(s)
if s.sy == ',':
s.next()
args.extend(p_c_arg_list(s, in_pyfunc = 1,
nonempty_declarators = 1, kw_only = 1))
elif s.sy != ')':
s.error("Syntax error in Python function argument list")
if s.sy == '**':
s.next()
starstar_arg = p_py_arg_decl(s)
s.expect(')')
if p_nogil(s):
        error(s.position(), "Python function cannot be declared nogil")
doc, body = p_suite(s, 'function', with_doc = 1)
return Nodes.DefNode(pos, name = name, args = args,
star_arg = star_arg, starstar_arg = starstar_arg,
doc = doc, body = body)
def p_py_arg_decl(s):
pos = s.position()
name = p_ident(s)
return Nodes.PyArgDeclNode(pos, name = name)
def p_class_statement(s):
# s.sy == 'class'
pos = s.position()
s.next()
class_name = p_ident(s)
if s.sy == '(':
s.next()
base_list = p_simple_expr_list(s)
s.expect(')')
else:
base_list = []
doc, body = p_suite(s, 'class', with_doc = 1)
return Nodes.PyClassDefNode(pos,
name = class_name,
bases = ExprNodes.TupleNode(pos, args = base_list),
doc = doc, body = body)
def p_c_class_definition(s, level, pos,
visibility = 'private', typedef_flag = 0, api = 0):
# s.sy == 'class'
s.next()
module_path = []
class_name = p_ident(s)
while s.sy == '.':
s.next()
module_path.append(class_name)
class_name = p_ident(s)
if module_path and visibility != 'extern':
error(pos, "Qualified class name only allowed for 'extern' C class")
if module_path and s.sy == 'IDENT' and s.systring == 'as':
s.next()
as_name = p_ident(s)
else:
as_name = class_name
s.add_type_name(as_name)
objstruct_name = None
typeobj_name = None
base_class_module = None
base_class_name = None
if s.sy == '(':
s.next()
base_class_path = [p_ident(s)]
while s.sy == '.':
s.next()
base_class_path.append(p_ident(s))
if s.sy == ',':
s.error("C class may only have one base class")
s.expect(')')
base_class_module = ".".join(base_class_path[:-1])
base_class_name = base_class_path[-1]
if s.sy == '[':
if visibility not in ('public', 'extern'):
error(s.position(), "Name options only allowed for 'public' or 'extern' C class")
objstruct_name, typeobj_name = p_c_class_options(s)
if s.sy == ':':
if level == 'module_pxd':
body_level = 'c_class_pxd'
else:
body_level = 'c_class'
doc, body = p_suite(s, body_level, with_doc = 1)
else:
s.expect_newline("Syntax error in C class definition")
doc = None
body = None
if visibility == 'extern':
if not module_path:
error(pos, "Module name required for 'extern' C class")
if typeobj_name:
error(pos, "Type object name specification not allowed for 'extern' C class")
elif visibility == 'public':
if not objstruct_name:
error(pos, "Object struct name specification required for 'public' C class")
if not typeobj_name:
error(pos, "Type object name specification required for 'public' C class")
elif visibility == 'private':
if api:
error(pos, "Only 'public' C class can be declared 'api'")
else:
error(pos, "Invalid class visibility '%s'" % visibility)
return Nodes.CClassDefNode(pos,
visibility = visibility,
typedef_flag = typedef_flag,
api = api,
module_name = ".".join(module_path),
class_name = class_name,
as_name = as_name,
base_class_module = base_class_module,
base_class_name = base_class_name,
objstruct_name = objstruct_name,
typeobj_name = typeobj_name,
in_pxd = level == 'module_pxd',
doc = doc,
body = body)
def p_c_class_options(s):
objstruct_name = None
typeobj_name = None
s.expect('[')
while 1:
if s.sy != 'IDENT':
break
if s.systring == 'object':
s.next()
objstruct_name = p_ident(s)
elif s.systring == 'type':
s.next()
typeobj_name = p_ident(s)
if s.sy != ',':
break
s.next()
s.expect(']', "Expected 'object' or 'type'")
return objstruct_name, typeobj_name
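# For illustration (assumption; the class names are made up): a declaration
# such as "cdef class Spam [object SpamObject, type SpamType]:" sets
# objstruct_name to "SpamObject" and typeobj_name to "SpamType"; these
# name options are only accepted for 'public' or 'extern' C classes.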
def p_property_decl(s):
pos = s.position()
s.next() # 'property'
name = p_ident(s)
doc, body = p_suite(s, 'property', with_doc = 1)
return Nodes.PropertyNode(pos, name = name, doc = doc, body = body)
def p_doc_string(s):
if s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
_, result = p_cat_string_literal(s)
if s.sy != 'EOF':
s.expect_newline("Syntax error in doc string")
return result
else:
return None
def p_module(s, pxd, full_module_name):
s.add_type_name("object")
pos = s.position()
doc = p_doc_string(s)
if pxd:
level = 'module_pxd'
else:
level = 'module'
body = p_statement_list(s, level)
if s.sy != 'EOF':
s.error("Syntax error in statement [%s,%s]" % (
repr(s.sy), repr(s.systring)))
return ModuleNode(pos, doc = doc, body = body, full_module_name = full_module_name)
#----------------------------------------------
#
# Debugging
#
#----------------------------------------------
def print_parse_tree(f, node, level, key = None):
from Nodes import Node
ind = " " * level
if node:
f.write(ind)
if key:
f.write("%s: " % key)
t = type(node)
if t == TupleType:
f.write("(%s @ %s\n" % (node[0], node[1]))
for i in xrange(2, len(node)):
print_parse_tree(f, node[i], level+1)
f.write("%s)\n" % ind)
return
elif isinstance(node, Node):
try:
tag = node.tag
except AttributeError:
tag = node.__class__.__name__
f.write("%s @ %s\n" % (tag, node.pos))
for name, value in node.__dict__.items():
if name != 'tag' and name != 'pos':
print_parse_tree(f, value, level+1, name)
return
elif t == ListType:
f.write("[\n")
for i in xrange(len(node)):
print_parse_tree(f, node[i], level+1)
f.write("%s]\n" % ind)
return
f.write("%s%s\n" % (ind, node))
#
# Pyrex - Types
#
import string
import Naming
class BaseType:
#
# Base class for all Pyrex types including pseudo-types.
def cast_code(self, expr_code):
return "((%s)%s)" % (self.declaration_code(""), expr_code)
def base_declaration_code(self, base_code, entity_code):
if entity_code:
return "%s %s" % (base_code, entity_code)
else:
return base_code
class PyrexType(BaseType):
#
# Base class for all Pyrex types.
#
# is_pyobject boolean Is a Python object type
# is_extension_type boolean Is a Python extension type
# is_numeric boolean Is a C numeric type
# is_int boolean Is a C integer type
# is_float boolean Is a C floating point type
# is_void boolean Is the C void type
# is_array boolean Is a C array type
# is_ptr boolean Is a C pointer type
# is_null_ptr boolean Is the type of NULL
# is_cfunction boolean Is a C function type
# is_struct_or_union boolean Is a C struct or union type
# is_enum boolean Is a C enum type
# is_typedef boolean Is a typedef type
# is_string boolean Is a C char * type
# is_returncode boolean Is used only to signal exceptions
# is_error boolean Is the dummy error type
# has_attributes boolean Has C dot-selectable attributes
# default_value string Initial value
# parsetuple_format string Format char for PyArg_ParseTuple
# pymemberdef_typecode string Type code for PyMemberDef struct
#
# declaration_code(entity_code,
# for_display = 0, dll_linkage = None, pyrex = 0)
# Returns a code fragment for the declaration of an entity
# of this type, given a code fragment for the entity.
# * If for_display, this is for reading by a human in an error
# message; otherwise it must be valid C code.
# * If dll_linkage is not None, it must be 'DL_EXPORT' or
# 'DL_IMPORT', and will be added to the base type part of
# the declaration.
# * If pyrex = 1, this is for use in a 'cdef extern'
# statement of a Pyrex include file.
#
# assignable_from(src_type)
# Tests whether a variable of this type can be
# assigned a value of type src_type.
#
# same_as(other_type)
# Tests whether this type represents the same type
# as other_type.
#
# as_argument_type():
# Coerces array type into pointer type for use as
# a formal argument type.
#
is_pyobject = 0
is_extension_type = 0
is_numeric = 0
is_int = 0
is_float = 0
is_void = 0
is_array = 0
is_ptr = 0
is_null_ptr = 0
is_cfunction = 0
is_struct_or_union = 0
is_enum = 0
is_typedef = 0
is_string = 0
is_returncode = 0
is_error = 0
has_attributes = 0
default_value = ""
parsetuple_format = ""
pymemberdef_typecode = None
def resolve(self):
# If a typedef, returns the base type.
return self
def literal_code(self, value):
# Returns a C code fragment representing a literal
# value of this type.
return str(value)
def __str__(self):
return string.strip(self.declaration_code("", for_display = 1))
def same_as(self, other_type, **kwds):
return self.same_as_resolved_type(other_type.resolve(), **kwds)
def same_as_resolved_type(self, other_type):
return self == other_type or other_type is error_type
def subtype_of(self, other_type):
return self.subtype_of_resolved_type(other_type.resolve())
def subtype_of_resolved_type(self, other_type):
return self.same_as(other_type)
def assignable_from(self, src_type):
return self.assignable_from_resolved_type(src_type.resolve())
def assignable_from_resolved_type(self, src_type):
return self.same_as(src_type)
def as_argument_type(self):
return self
def is_complete(self):
# A type is incomplete if it is an unsized array,
# a struct whose attributes are not defined, etc.
return 1
class CTypedefType(BaseType):
#
# Pseudo-type defined with a ctypedef statement in a
# 'cdef extern from' block. Delegates most attribute
# lookups to the base type. ANYTHING NOT DEFINED
# HERE IS DELEGATED!
#
# qualified_name string
# typedef_cname string
# typedef_base_type PyrexType
is_typedef = 1
def __init__(self, cname, base_type):
self.typedef_cname = cname
self.typedef_base_type = base_type
def resolve(self):
return self.typedef_base_type.resolve()
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
name = self.declaration_name(for_display, pyrex)
return self.base_declaration_code(name, entity_code)
def declaration_name(self, for_display = 0, pyrex = 0):
if pyrex or for_display:
return self.qualified_name
else:
return self.typedef_cname
def as_argument_type(self):
return self
def cast_code(self, expr_code):
# If self is really an array (rather than pointer), we can't cast.
# For example, the gmp mpz_t.
if self.typedef_base_type.is_ptr:
return self.typedef_base_type.cast_code(expr_code)
else:
return BaseType.cast_code(self, expr_code)
def __repr__(self):
return "<CTypedefType %s>" % self.typedef_cname
def __str__(self):
return self.declaration_name(for_display = 1)
def __getattr__(self, name):
return getattr(self.typedef_base_type, name)
class PyObjectType(PyrexType):
#
# Base class for all Python object types (reference-counted).
#
is_pyobject = 1
default_value = "0"
parsetuple_format = "O"
pymemberdef_typecode = "T_OBJECT"
def __str__(self):
return "Python object"
def __repr__(self):
return "<PyObjectType>"
def assignable_from(self, src_type):
return 1 # Conversion will be attempted
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
if pyrex or for_display:
return self.base_declaration_code("object", entity_code)
else:
return "%s *%s" % (public_decl("PyObject", dll_linkage), entity_code)
class PyExtensionType(PyObjectType):
#
# A Python extension type.
#
# name string
# scope CClassScope Attribute namespace
# visibility string
# typedef_flag boolean
# base_type PyExtensionType or None
# module_name string or None Qualified name of defining module
# objstruct_cname string Name of PyObject struct
# typeobj_cname string or None C code fragment referring to type object
# typeptr_cname string or None Name of pointer to external type object
# vtabslot_cname string Name of C method table member
# vtabstruct_cname string Name of C method table struct
# vtabptr_cname string Name of pointer to C method table
# vtable_cname string Name of C method table definition
is_extension_type = 1
has_attributes = 1
def __init__(self, name, typedef_flag, base_type):
self.name = name
self.scope = None
self.typedef_flag = typedef_flag
self.base_type = base_type
self.module_name = None
self.objstruct_cname = None
self.typeobj_cname = None
self.typeptr_cname = None
self.vtabslot_cname = None
self.vtabstruct_cname = None
self.vtabptr_cname = None
self.vtable_cname = None
def set_scope(self, scope):
self.scope = scope
if scope:
scope.parent_type = self
def subtype_of_resolved_type(self, other_type):
if other_type.is_extension_type:
return self is other_type or (
self.base_type and self.base_type.subtype_of(other_type))
else:
return other_type is py_object_type
def typeobj_is_available(self):
# Do we have a pointer to the type object?
return self.typeptr_cname
def typeobj_is_imported(self):
# If we don't know the C name of the type object but we do
# know which module it's defined in, it will be imported.
return self.typeobj_cname is None and self.module_name is not None
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0, deref = 0):
if pyrex or for_display:
return self.base_declaration_code(self.name, entity_code)
else:
if self.typedef_flag:
base_format = "%s"
else:
base_format = "struct %s"
base = public_decl(base_format % self.objstruct_cname, dll_linkage)
if deref:
return "%s %s" % (base, entity_code)
else:
return "%s *%s" % (base, entity_code)
def attributes_known(self):
return self.scope is not None
def __str__(self):
return self.name
def __repr__(self):
return "<PyExtensionType %s%s>" % (self.scope.class_name,
("", " typedef")[self.typedef_flag])
class CType(PyrexType):
#
# Base class for all C types (non-reference-counted).
#
# to_py_function string C function for converting to Python object
# from_py_function string C function for constructing from Python object
#
to_py_function = None
from_py_function = None
exception_value = None
exception_check = 1
def error_condition(self, result_code):
conds = []
if self.is_string:
conds.append("(!%s)" % result_code)
elif self.exception_value is not None:
conds.append("(%s == (%s)%s)" % (result_code, self.sign_and_name(), self.exception_value))
if self.exception_check:
conds.append("PyErr_Occurred()")
if len(conds) > 0:
return " && ".join(conds)
else:
return 0
class CVoidType(CType):
is_void = 1
def __repr__(self):
return "<CVoidType>"
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
base = public_decl("void", dll_linkage)
return self.base_declaration_code(base, entity_code)
def is_complete(self):
return 0
class CNumericType(CType):
#
# Base class for all C numeric types.
#
# rank integer Relative size
# signed integer 0 = unsigned, 1 = unspecified, 2 = explicitly signed
#
is_numeric = 1
default_value = "0"
parsetuple_formats = ( # rank -> format
"BHIkK????", # unsigned
"bhilL?fd?", # assumed signed
"bhilL?fd?", # explicitly signed
)
sign_words = ("unsigned ", "", "signed ")
def __init__(self, rank, signed = 1, pymemberdef_typecode = None):
self.rank = rank
self.signed = signed
ptf = self.parsetuple_formats[signed][rank]
if ptf == '?':
ptf = None
self.parsetuple_format = ptf
self.pymemberdef_typecode = pymemberdef_typecode
def sign_and_name(self):
s = self.sign_words[self.signed]
n = rank_to_type_name[self.rank]
return s + n
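    # For illustration (assumption, using rank_to_type_name): an instance
    # with signed == 0 and a rank that maps to "int" yields
    # "unsigned int" here, which is also the text that cast_code() and
    # declaration_code() build on.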
def __repr__(self):
return "<CNumericType %s>" % self.sign_and_name()
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
base = public_decl(self.sign_and_name(), dll_linkage)
return self.base_declaration_code(base, entity_code)
int_conversion_list = {}
type_conversion_functions = ""
type_conversion_predeclarations = ""
class CIntType(CNumericType):
is_int = 1
typedef_flag = 0
to_py_function = "PyInt_FromLong"
from_py_function = "__pyx_PyInt_AsLong"
exception_value = -1
def __init__(self, rank, signed, pymemberdef_typecode = None, is_returncode = 0):
CNumericType.__init__(self, rank, signed, pymemberdef_typecode)
self.is_returncode = is_returncode
if self.from_py_function == '__pyx_PyInt_AsLong':
self.from_py_function = self.get_type_conversion()
def get_type_conversion(self):
# error on overflow
c_type = self.sign_and_name()
        c_name = c_type.replace(' ', '_')
        func_name = "__pyx_PyInt_%s" % c_name
if not int_conversion_list.has_key(func_name):
# no env to add utility code to
global type_conversion_predeclarations, type_conversion_functions
if self.signed:
neg_test = ""
else:
neg_test = " || (long_val < 0)"
type_conversion_predeclarations += """
static INLINE %(c_type)s %(func_name)s(PyObject* x);""" % {'c_type': c_type, 'c_name': c_name, 'func_name': func_name }
type_conversion_functions += """
static INLINE %(c_type)s %(func_name)s(PyObject* x) {
if (sizeof(%(c_type)s) < sizeof(long)) {
long long_val = __pyx_PyInt_AsLong(x);
%(c_type)s val = (%(c_type)s)long_val;
if (unlikely((val != long_val) %(neg_test)s)) {
PyErr_SetString(PyExc_OverflowError, "value too large to convert to %(c_type)s");
return (%(c_type)s)-1;
}
return val;
}
else {
return __pyx_PyInt_AsLong(x);
}
}
""" % {'c_type': c_type, 'c_name': c_name, 'func_name': func_name, 'neg_test': neg_test }
int_conversion_list[func_name] = True
return func_name
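    # For illustration (assumption from the name mangling above): for the
    # C type "unsigned char" the generated converter is named
    # __pyx_PyInt_unsigned_char; it narrows the result of
    # __pyx_PyInt_AsLong and raises OverflowError if the value does not
    # fit (including negative values for unsigned types).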
def assignable_from_resolved_type(self, src_type):
return src_type.is_int or src_type.is_enum or src_type is error_type
class CBIntType(CIntType):
to_py_function = "__Pyx_PyBool_FromLong"
from_py_function = "__Pyx_PyObject_IsTrue"
exception_check = 0
class CAnonEnumType(CIntType):
is_enum = 1
class CUIntType(CIntType):
to_py_function = "PyLong_FromUnsignedLong"
from_py_function = "PyInt_AsUnsignedLongMask"
exception_value = -1
class CULongType(CUIntType):
to_py_function = "PyLong_FromUnsignedLong"
from_py_function = "PyInt_AsUnsignedLongMask"
class CLongLongType(CUIntType):
to_py_function = "PyLong_FromLongLong"
from_py_function = "__pyx_PyInt_AsLongLong"
class CULongLongType(CUIntType):
to_py_function = "PyLong_FromUnsignedLongLong"
from_py_function = "__pyx_PyInt_AsUnsignedLongLong"
class CPySSizeTType(CIntType):
to_py_function = "PyInt_FromSsize_t"
from_py_function = "__pyx_PyIndex_AsSsize_t"
class CFloatType(CNumericType):
is_float = 1
to_py_function = "PyFloat_FromDouble"
from_py_function = "__pyx_PyFloat_AsDouble"
def __init__(self, rank, pymemberdef_typecode = None):
CNumericType.__init__(self, rank, 1, pymemberdef_typecode)
def assignable_from_resolved_type(self, src_type):
return src_type.is_numeric or src_type is error_type
class CArrayType(CType):
# base_type CType Element type
# size integer or None Number of elements
is_array = 1
def __init__(self, base_type, size):
self.base_type = base_type
self.size = size
if base_type is c_char_type:
self.is_string = 1
def __repr__(self):
return "<CArrayType %s %s>" % (self.size, repr(self.base_type))
def same_as_resolved_type(self, other_type):
return ((other_type.is_array and
self.base_type.same_as(other_type.base_type))
or other_type is error_type)
def assignable_from_resolved_type(self, src_type):
# Can't assign to a variable of an array type
return 0
def element_ptr_type(self):
return c_ptr_type(self.base_type)
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
if self.size is not None:
dimension_code = self.size
else:
dimension_code = ""
if entity_code.startswith("*"):
entity_code = "(%s)" % entity_code
return self.base_type.declaration_code(
"%s[%s]" % (entity_code, dimension_code),
for_display, dll_linkage, pyrex)
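# Sketch of the output (assuming the int base type renders as "int <entity>"):
# an array of 10 ints named "x" is declared roughly as "int x[10]", while an
# entity_code starting with "*" is parenthesised first, so a pointer-to-array
# comes out as "int (*x)[10]".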
def as_argument_type(self):
return c_ptr_type(self.base_type)
def is_complete(self):
return self.size is not None
class CPtrType(CType):
# base_type CType Referenced type
is_ptr = 1
default_value = "0"
def __init__(self, base_type):
self.base_type = base_type
def __repr__(self):
return "<CPtrType %s>" % repr(self.base_type)
def same_as_resolved_type(self, other_type):
return ((other_type.is_ptr and
self.base_type.same_as(other_type.base_type))
or other_type is error_type)
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
#print "CPtrType.declaration_code: pointer to", self.base_type ###
return self.base_type.declaration_code(
"*%s" % entity_code,
for_display, dll_linkage, pyrex)
def assignable_from_resolved_type(self, other_type):
if other_type is error_type:
return 1
elif other_type.is_null_ptr:
return 1
elif self.base_type.is_cfunction and other_type.is_cfunction:
return self.base_type.same_as(other_type)
elif other_type.is_array or other_type.is_ptr:
return self.base_type.is_void or self.base_type.same_as(other_type.base_type)
else:
return 0
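# Intent, in rough terms: a "void *" target accepts any pointer or array
# source (self.base_type.is_void), an "int *" target accepts only
# "int *" / "int []" sources, and a NULL pointer or the error type is
# accepted everywhere.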
class CNullPtrType(CPtrType):
is_null_ptr = 1
class CFuncType(CType):
# return_type CType
# args [CFuncTypeArg]
# has_varargs boolean
# exception_value string
# exception_check boolean True if PyErr_Occurred check needed
# calling_convention string Function calling convention
# nogil boolean Can be called without gil
# with_gil boolean Acquire gil around function body
is_cfunction = 1
original_sig = None
def __init__(self, return_type, args, has_varargs = 0,
exception_value = None, exception_check = 0, calling_convention = "",
nogil = 0, with_gil = 0, is_overridable = 0, optional_arg_count = 0):
self.return_type = return_type
self.args = args
self.has_varargs = has_varargs
self.optional_arg_count = optional_arg_count
self.exception_value = exception_value
self.exception_check = exception_check
self.calling_convention = calling_convention
self.nogil = nogil
self.with_gil = with_gil
self.is_overridable = is_overridable
def __repr__(self):
arg_reprs = map(repr, self.args)
if self.has_varargs:
arg_reprs.append("...")
return "<CFuncType %s %s[%s]>" % (
repr(self.return_type),
self.calling_convention_prefix(),
string.join(arg_reprs, ","))
def calling_convention_prefix(self):
cc = self.calling_convention
if cc:
return cc + " "
else:
return ""
def same_c_signature_as(self, other_type, as_cmethod = 0):
return self.same_c_signature_as_resolved_type(
other_type.resolve(), as_cmethod)
def same_c_signature_as_resolved_type(self, other_type, as_cmethod):
#print "CFuncType.same_c_signature_as_resolved_type:", \
# self, other_type, "as_cmethod =", as_cmethod ###
if other_type is error_type:
return 1
if not other_type.is_cfunction:
return 0
if not self.is_overridable and other_type.is_overridable:
return 0
nargs = len(self.args)
if nargs != len(other_type.args):
return 0
# When comparing C method signatures, the first argument
# is exempt from compatibility checking (the proper check
# is performed elsewhere).
for i in range(as_cmethod, nargs):
if not self.args[i].type.same_as(
other_type.args[i].type):
return 0
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.same_as(other_type.return_type):
return 0
if not self.same_calling_convention_as(other_type):
return 0
return 1
def compatible_signature_with(self, other_type, as_cmethod = 0):
return self.compatible_signature_with_resolved_type(other_type.resolve(), as_cmethod)
def compatible_signature_with_resolved_type(self, other_type, as_cmethod):
#print "CFuncType.same_c_signature_as_resolved_type:", \
# self, other_type, "as_cmethod =", as_cmethod ###
if other_type is error_type:
return 1
if not other_type.is_cfunction:
return 0
if not self.is_overridable and other_type.is_overridable:
return 0
nargs = len(self.args)
if nargs - self.optional_arg_count != len(other_type.args) - other_type.optional_arg_count:
return 0
if self.optional_arg_count < other_type.optional_arg_count:
return 0
# When comparing C method signatures, the first argument
# is exempt from compatibility checking (the proper check
# is performed elsewhere).
for i in range(as_cmethod, len(other_type.args)):
if not self.args[i].type.same_as(
other_type.args[i].type):
return 0
if self.has_varargs != other_type.has_varargs:
return 0
if not self.return_type.subtype_of_resolved_type(other_type.return_type):
return 0
if not self.same_calling_convention_as(other_type):
return 0
self.original_sig = other_type.original_sig or other_type
if as_cmethod:
self.args[0] = other_type.args[0]
return 1
def narrower_c_signature_than(self, other_type, as_cmethod = 0):
return self.narrower_c_signature_than_resolved_type(other_type.resolve(), as_cmethod)
def narrower_c_signature_than_resolved_type(self, other_type, as_cmethod):
if other_type is error_type:
return 1
if not other_type.is_cfunction:
return 0
nargs = len(self.args)
if nargs != len(other_type.args):
return 0
for i in range(as_cmethod, nargs):
if not self.args[i].type.subtype_of_resolved_type(other_type.args[i].type):
return 0
else:
self.args[i].needs_type_test = other_type.args[i].needs_type_test \
or not self.args[i].type.same_as(other_type.args[i].type)
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.subtype_of_resolved_type(other_type.return_type):
return 0
return 1
def same_calling_convention_as(self, other):
sc1 = self.calling_convention == '__stdcall'
sc2 = other.calling_convention == '__stdcall'
return sc1 == sc2
def same_exception_signature_as(self, other_type):
return self.same_exception_signature_as_resolved_type(
other_type.resolve())
def same_exception_signature_as_resolved_type(self, other_type):
return self.exception_value == other_type.exception_value \
and self.exception_check == other_type.exception_check
def same_as_resolved_type(self, other_type, as_cmethod = 0):
return self.same_c_signature_as_resolved_type(other_type, as_cmethod) \
and self.same_exception_signature_as_resolved_type(other_type)
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
arg_decl_list = []
for arg in self.args[:len(self.args)-self.optional_arg_count]:
arg_decl_list.append(
arg.type.declaration_code("", for_display, pyrex = pyrex))
if self.optional_arg_count:
arg_decl_list.append(self.op_arg_struct.declaration_code(Naming.optional_args_cname))
if self.has_varargs:
arg_decl_list.append("...")
arg_decl_code = string.join(arg_decl_list, ", ")
if not arg_decl_code and not pyrex:
arg_decl_code = "void"
exc_clause = ""
if (pyrex or for_display) and not self.return_type.is_pyobject:
if self.exception_value and self.exception_check:
exc_clause = " except? %s" % self.exception_value
elif self.exception_value:
exc_clause = " except %s" % self.exception_value
elif self.exception_check == '+':
exc_clause = " except +"
else:
exc_clause = " except *"
cc = self.calling_convention_prefix()
if (not entity_code and cc) or entity_code.startswith("*"):
entity_code = "(%s%s)" % (cc, entity_code)
cc = ""
return self.return_type.declaration_code(
"%s%s(%s)%s" % (cc, entity_code, arg_decl_code, exc_clause),
for_display, dll_linkage, pyrex)
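# Illustrative only: a function of (int, char *) returning int renders
# roughly as "int f(int, char *)"; when the entity is itself a pointer
# ("*f") or a calling convention is present, it is wrapped first, giving
# e.g. "int (*f)(int, char *)". An empty C argument list is spelled "void"
# unless we are printing Pyrex syntax.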
def function_header_code(self, func_name, arg_code):
return "%s%s(%s)" % (self.calling_convention_prefix(),
func_name, arg_code)
def signature_string(self):
s = self.declaration_code("")
return s
class CFuncTypeArg:
# name string
# cname string
# type PyrexType
# pos source file position
def __init__(self, name, type, pos):
self.name = name
self.cname = Naming.var_prefix + name
self.type = type
self.pos = pos
self.not_none = False
self.needs_type_test = False # TODO: should these defaults be set in analyse_types()?
def __repr__(self):
return "%s:%s" % (self.name, repr(self.type))
def declaration_code(self, for_display = 0):
return self.type.declaration_code(self.cname, for_display)
class CStructOrUnionType(CType):
# name string
# cname string
# kind string "struct" or "union"
# scope StructOrUnionScope, or None if incomplete
# typedef_flag boolean
is_struct_or_union = 1
has_attributes = 1
def __init__(self, name, kind, scope, typedef_flag, cname):
self.name = name
self.cname = cname
self.kind = kind
self.scope = scope
self.typedef_flag = typedef_flag
def __repr__(self):
return "<CStructOrUnionType %s %s%s>" % (self.name, self.cname,
("", " typedef")[self.typedef_flag])
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
if pyrex:
return self.base_declaration_code(self.name, entity_code)
else:
if for_display:
base = self.name
elif self.typedef_flag:
base = self.cname
else:
base = "%s %s" % (self.kind, self.cname)
return self.base_declaration_code(public_decl(base, dll_linkage), entity_code)
def __cmp__(self, other):
try:
if self.name == other.name:
return 0
else:
return 1
except AttributeError:
return 1
def is_complete(self):
return self.scope is not None
def attributes_known(self):
return self.is_complete()
class CEnumType(CType):
# name string
# cname string or None
# typedef_flag boolean
is_enum = 1
signed = 1
rank = -1 # Ranks below any integer type
to_py_function = "PyInt_FromLong"
from_py_function = "PyInt_AsLong"
def __init__(self, name, cname, typedef_flag):
self.name = name
self.cname = cname
self.values = []
self.typedef_flag = typedef_flag
def __str__(self):
return self.name
def __repr__(self):
return "<CEnumType %s %s%s>" % (self.name, self.cname,
("", " typedef")[self.typedef_flag])
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
if pyrex:
return self.base_declaration_code(self.cname, entity_code)
else:
if self.typedef_flag:
base = self.cname
else:
base = "enum %s" % self.cname
return self.base_declaration_code(public_decl(base, dll_linkage), entity_code)
class CStringType:
# Mixin class for C string types.
is_string = 1
to_py_function = "PyString_FromString"
from_py_function = "PyString_AsString"
exception_value = "NULL"
def literal_code(self, value):
if isinstance(value, unicode):
value = value.encode("UTF-8")
return '"%s"' % value
class CCharArrayType(CStringType, CArrayType):
# C 'char []' type.
parsetuple_format = "s"
pymemberdef_typecode = "T_STRING_INPLACE"
def __init__(self, size):
CArrayType.__init__(self, c_char_type, size)
class CCharPtrType(CStringType, CPtrType):
# C 'char *' type.
parsetuple_format = "s"
pymemberdef_typecode = "T_STRING"
def __init__(self):
CPtrType.__init__(self, c_char_type)
class ErrorType(PyrexType):
# Used to prevent propagation of error messages.
is_error = 1
exception_value = "0"
exception_check = 0
to_py_function = "dummy"
from_py_function = "dummy"
def declaration_code(self, entity_code,
for_display = 0, dll_linkage = None, pyrex = 0):
return "<error>"
def same_as_resolved_type(self, other_type):
return 1
def error_condition(self, result_code):
return "dummy"
rank_to_type_name = (
"char", # 0
"short", # 1
"int", # 2
"long", # 3
"PY_LONG_LONG", # 4
"Py_ssize_t", # 5
"float", # 6
"double", # 7
"long double", # 8
)
py_object_type = PyObjectType()
c_void_type = CVoidType()
c_void_ptr_type = CPtrType(c_void_type)
c_void_ptr_ptr_type = CPtrType(c_void_ptr_type)
c_uchar_type = CIntType(0, 0, "T_UBYTE")
c_ushort_type = CIntType(1, 0, "T_USHORT")
c_uint_type = CUIntType(2, 0, "T_UINT")
c_ulong_type = CULongType(3, 0, "T_ULONG")
c_ulonglong_type = CULongLongType(4, 0, "T_ULONGLONG")
c_char_type = CIntType(0, 1, "T_CHAR")
c_short_type = CIntType(1, 1, "T_SHORT")
c_int_type = CIntType(2, 1, "T_INT")
c_long_type = CIntType(3, 1, "T_LONG")
c_longlong_type = CLongLongType(4, 1, "T_LONGLONG")
c_py_ssize_t_type = CPySSizeTType(5, 1)
c_bint_type = CBIntType(2, 1, "T_INT")
c_schar_type = CIntType(0, 2, "T_CHAR")
c_sshort_type = CIntType(1, 2, "T_SHORT")
c_sint_type = CIntType(2, 2, "T_INT")
c_slong_type = CIntType(3, 2, "T_LONG")
c_slonglong_type = CLongLongType(4, 2, "T_LONGLONG")
c_float_type = CFloatType(6, "T_FLOAT")
c_double_type = CFloatType(7, "T_DOUBLE")
c_longdouble_type = CFloatType(8)
c_null_ptr_type = CNullPtrType(c_void_type)
c_char_array_type = CCharArrayType(None)
c_char_ptr_type = CCharPtrType()
c_char_ptr_ptr_type = CPtrType(c_char_ptr_type)
c_int_ptr_type = CPtrType(c_int_type)
c_returncode_type = CIntType(2, 1, "T_INT", is_returncode = 1)
c_anon_enum_type = CAnonEnumType(-1, 1)
error_type = ErrorType()
lowest_float_rank = 6
sign_and_rank_to_type = {
#(signed, rank)
(0, 0, ): c_uchar_type,
(0, 1): c_ushort_type,
(0, 2): c_uint_type,
(0, 3): c_ulong_type,
(0, 4): c_ulonglong_type,
(0, 5): c_ulonglong_type, # Tentative: this slot really corresponds to size_t, the unsigned counterpart of Py_ssize_t
(1, 0): c_char_type,
(1, 1): c_short_type,
(1, 2): c_int_type,
(1, 3): c_long_type,
(1, 4): c_longlong_type,
(1, 5): c_py_ssize_t_type,
(2, 0): c_schar_type,
(2, 1): c_sshort_type,
(2, 2): c_sint_type,
(2, 3): c_slong_type,
(2, 4): c_slonglong_type,
(2, 5): c_py_ssize_t_type,
(1, 6): c_float_type,
(1, 7): c_double_type,
(1, 8): c_longdouble_type,
}
modifiers_and_name_to_type = {
#(signed, longness, name)
(0, 0, "char"): c_uchar_type,
(0, -1, "int"): c_ushort_type,
(0, 0, "int"): c_uint_type,
(0, 1, "int"): c_ulong_type,
(0, 2, "int"): c_ulonglong_type,
(1, 0, "void"): c_void_type,
(1, 0, "char"): c_char_type,
(1, -1, "int"): c_short_type,
(1, 0, "int"): c_int_type,
(1, 1, "int"): c_long_type,
(1, 2, "int"): c_longlong_type,
(1, 0, "Py_ssize_t"): c_py_ssize_t_type,
(1, 0, "float"): c_float_type,
(1, 0, "double"): c_double_type,
(1, 1, "double"): c_longdouble_type,
(1, 0, "object"): py_object_type,
(1, 0, "bint"): c_bint_type,
(2, 0, "char"): c_schar_type,
(2, -1, "int"): c_sshort_type,
(2, 0, "int"): c_sint_type,
(2, 1, "int"): c_slong_type,
(2, 2, "int"): c_slonglong_type,
(2, 0, "Py_ssize_t"): c_py_ssize_t_type,
}
def widest_numeric_type(type1, type2):
# Given two numeric types, return the narrowest type
# encompassing both of them.
if type1.is_enum and type2.is_enum:
widest_type = c_int_type
elif type2.rank > type1.rank:
widest_type = type2
else:
widest_type = type1
return widest_type
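# Example, using the rank table above: double (rank 7) outranks int (rank 2),
# so widest_numeric_type(c_int_type, c_double_type) returns c_double_type;
# two enum operands collapse to plain c_int_type.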
def simple_c_type(signed, longness, name):
# Find type descriptor for simple type given name and modifiers.
# Returns None if arguments don't make sense.
return modifiers_and_name_to_type.get((signed, longness, name))
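# For instance, simple_c_type(1, 1, "int") resolves to c_long_type via the
# modifiers_and_name_to_type table above (signed, longness +1), and an
# unrecognised combination simply yields None.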
def c_array_type(base_type, size):
# Construct a C array type.
if base_type is c_char_type:
return CCharArrayType(size)
else:
return CArrayType(base_type, size)
def c_ptr_type(base_type):
# Construct a C pointer type.
if base_type is c_char_type:
return c_char_ptr_type
else:
return CPtrType(base_type)
def public_decl(base, dll_linkage):
if dll_linkage:
return "%s(%s)" % (dll_linkage, base)
else:
return base
def same_type(type1, type2):
return type1.same_as(type2)
def assignable_from(type1, type2):
return type1.assignable_from(type2)
def typecast(to_type, from_type, expr_code):
# Return expr_code cast to a C type which can be
# assigned to to_type, assuming its existing C type
# is from_type.
if to_type is from_type or \
(not to_type.is_pyobject and assignable_from(to_type, from_type)):
return expr_code
else:
#print "typecast: to", to_type, "from", from_type ###
return to_type.cast_code(expr_code)
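# Hypothetical usage: typecast(c_long_type, c_int_type, "x") returns "x"
# unchanged, since a C long is assignable from an int, whereas an
# incompatible or Python-object target falls through to
# to_type.cast_code("x").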
type_conversion_predeclarations = """
/* Type Conversion Predeclarations */
#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False))
static INLINE int __Pyx_PyObject_IsTrue(PyObject* x);
static INLINE PY_LONG_LONG __pyx_PyInt_AsLongLong(PyObject* x);
static INLINE unsigned PY_LONG_LONG __pyx_PyInt_AsUnsignedLongLong(PyObject* x);
static INLINE Py_ssize_t __pyx_PyIndex_AsSsize_t(PyObject* b);
#define __pyx_PyInt_AsLong(x) (PyInt_CheckExact(x) ? PyInt_AS_LONG(x) : PyInt_AsLong(x))
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
""" + type_conversion_predeclarations
type_conversion_functions = """
/* Type Conversion Functions */
static INLINE Py_ssize_t __pyx_PyIndex_AsSsize_t(PyObject* b) {
Py_ssize_t ival;
PyObject* x = PyNumber_Index(b);
if (!x) return -1;
ival = PyInt_AsSsize_t(x);
Py_DECREF(x);
return ival;
}
static INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
if (x == Py_True) return 1;
else if (x == Py_False) return 0;
else return PyObject_IsTrue(x);
}
static INLINE PY_LONG_LONG __pyx_PyInt_AsLongLong(PyObject* x) {
if (PyInt_CheckExact(x)) {
return PyInt_AS_LONG(x);
}
else if (PyLong_CheckExact(x)) {
return PyLong_AsLongLong(x);
}
else {
PY_LONG_LONG val;
PyObject* tmp = PyNumber_Int(x); if (!tmp) return (PY_LONG_LONG)-1;
val = __pyx_PyInt_AsLongLong(tmp);
Py_DECREF(tmp);
return val;
}
}
static INLINE unsigned PY_LONG_LONG __pyx_PyInt_AsUnsignedLongLong(PyObject* x) {
if (PyInt_CheckExact(x)) {
long val = PyInt_AS_LONG(x);
if (unlikely(val < 0)) {
PyErr_SetString(PyExc_TypeError, "Negative assignment to unsigned type.");
return (unsigned PY_LONG_LONG)-1;
}
return val;
}
else if (PyLong_CheckExact(x)) {
return PyLong_AsUnsignedLongLong(x);
}
else {
PY_LONG_LONG val;
PyObject* tmp = PyNumber_Int(x); if (!tmp) return (PY_LONG_LONG)-1;
val = __pyx_PyInt_AsUnsignedLongLong(tmp);
Py_DECREF(tmp);
return val;
}
}
""" + type_conversion_functions
#
# Pyrex Scanner
#
#import pickle
import cPickle as pickle
import os
import platform
import stat
import sys
from time import time
from Cython import Plex
from Cython.Plex import Scanner
from Cython.Plex.Errors import UnrecognizedInput
from Errors import CompileError, error
from Lexicon import string_prefixes, make_lexicon
plex_version = getattr(Plex, '_version', None)
#print "Plex version:", plex_version ###
debug_scanner = 0
trace_scanner = 0
scanner_debug_flags = 0
scanner_dump_file = None
binary_lexicon_pickle = 1
notify_lexicon_unpickling = 0
notify_lexicon_pickling = 1
lexicon = None
#-----------------------------------------------------------------
def hash_source_file(path):
# Try to calculate a hash code for the given source file.
# Returns an empty string if the file cannot be accessed.
#print "Hashing", path ###
import md5
f = None
try:
try:
f = open(path, "rU")
text = f.read()
except IOError, e:
print("Unable to hash scanner source file (%s)" % e)
return ""
finally:
if f:
f.close()
# Normalise spaces/tabs. We don't know what sort of
# space-tab substitution the file may have been
# through, so we replace all spans of spaces and
# tabs by a single space.
import re
text = re.sub("[ \t]+", " ", text)
hash = md5.new(text).hexdigest()
return hash
def open_pickled_lexicon(expected_hash):
# Try to open pickled lexicon file and verify that
# it matches the source file. Returns the opened
# file if successful, otherwise None.
f = None
result = None
if os.path.exists(lexicon_pickle):
try:
f = open(lexicon_pickle, "rb")
actual_hash = pickle.load(f)
if actual_hash == expected_hash:
result = f
f = None
else:
print("Lexicon hash mismatch:") ###
print(" expected " + expected_hash) ###
print(" got " + actual_hash) ###
except IOError, e:
print("Warning: Unable to read pickled lexicon " + lexicon_pickle)
print(e)
if f:
f.close()
return result
def try_to_unpickle_lexicon():
global lexicon, lexicon_pickle, lexicon_hash
dir = os.path.dirname(__file__)
source_file = os.path.join(dir, "Lexicon.py")
lexicon_hash = hash_source_file(source_file)
lexicon_pickle = os.path.join(dir, "Lexicon.pickle")
f = open_pickled_lexicon(expected_hash = lexicon_hash)
if f:
if notify_lexicon_unpickling:
t0 = time()
print("Unpickling lexicon...")
lexicon = pickle.load(f)
f.close()
if notify_lexicon_unpickling:
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def create_new_lexicon():
global lexicon
t0 = time()
print("Creating lexicon...")
lexicon = make_lexicon()
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def pickle_lexicon():
f = None
try:
f = open(lexicon_pickle, "wb")
except IOError:
print("Warning: Unable to save pickled lexicon in " + lexicon_pickle)
if f:
if notify_lexicon_pickling:
t0 = time()
print("Pickling lexicon...")
pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
pickle.dump(lexicon, f, binary_lexicon_pickle)
f.close()
if notify_lexicon_pickling:
t1 = time()
print("Done (%.2f seconds)" % (t1 - t0))
def get_lexicon():
global lexicon
if not lexicon and plex_version is None:
try_to_unpickle_lexicon()
if not lexicon:
create_new_lexicon()
if plex_version is None:
pickle_lexicon()
return lexicon
#------------------------------------------------------------------
reserved_words = [
"global", "include", "ctypedef", "cdef", "def", "class",
"print", "del", "pass", "break", "continue", "return",
"raise", "import", "exec", "try", "except", "finally",
"while", "if", "elif", "else", "for", "in", "assert",
"and", "or", "not", "is", "in", "lambda", "from",
"NULL", "cimport", "by", "with", "cpdef", "DEF", "IF", "ELIF", "ELSE"
]
class Method:
def __init__(self, name):
self.name = name
self.__name__ = name # for Plex tracing
def __call__(self, stream, text):
return getattr(stream, self.name)(text)
#------------------------------------------------------------------
def build_resword_dict():
d = {}
for word in reserved_words:
d[word] = 1
return d
#------------------------------------------------------------------
class CompileTimeScope(object):
def __init__(self, outer = None):
self.entries = {}
self.outer = outer
def declare(self, name, value):
self.entries[name] = value
def lookup_here(self, name):
return self.entries[name]
def lookup(self, name):
try:
return self.lookup_here(name)
except KeyError:
outer = self.outer
if outer:
return outer.lookup(name)
else:
raise
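# Lookup walks outward through enclosing compile-time scopes: a name declared
# in the builtin scope set up below (e.g. 'UNAME_SYSNAME') is still visible
# from a derived scope, while a completely unknown name re-raises the
# KeyError.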
def initial_compile_time_env():
benv = CompileTimeScope()
names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE',
'UNAME_VERSION', 'UNAME_MACHINE')
for name, value in zip(names, platform.uname()):
benv.declare(name, value)
import __builtin__
names = ('False', 'True',
'abs', 'bool', 'chr', 'cmp', 'complex', 'dict', 'divmod', 'enumerate',
'float', 'hash', 'hex', 'int', 'len', 'list', 'long', 'map', 'max', 'min',
'oct', 'ord', 'pow', 'range', 'reduce', 'repr', 'round', 'slice', 'str',
'sum', 'tuple', 'xrange', 'zip')
for name in names:
benv.declare(name, getattr(__builtin__, name))
denv = CompileTimeScope(benv)
return denv
#------------------------------------------------------------------
class PyrexScanner(Scanner):
# context Context Compilation context
# type_names set Identifiers to be treated as type names
# compile_time_env dict Environment for conditional compilation
# compile_time_eval boolean In a true conditional compilation context
# compile_time_expr boolean In a compile-time expression context
resword_dict = build_resword_dict()
def __init__(self, file, filename, parent_scanner = None,
type_names = None, context = None):
Scanner.__init__(self, get_lexicon(), file, filename)
if parent_scanner:
self.context = parent_scanner.context
self.type_names = parent_scanner.type_names
self.compile_time_env = parent_scanner.compile_time_env
self.compile_time_eval = parent_scanner.compile_time_eval
self.compile_time_expr = parent_scanner.compile_time_expr
else:
self.context = context
self.type_names = type_names
self.compile_time_env = initial_compile_time_env()
self.compile_time_eval = 1
self.compile_time_expr = 0
self.trace = trace_scanner
self.indentation_stack = [0]
self.indentation_char = None
self.bracket_nesting_level = 0
self.begin('INDENT')
self.sy = ''
self.next()
def current_level(self):
return self.indentation_stack[-1]
def open_bracket_action(self, text):
self.bracket_nesting_level = self.bracket_nesting_level + 1
return text
def close_bracket_action(self, text):
self.bracket_nesting_level = self.bracket_nesting_level - 1
return text
def newline_action(self, text):
if self.bracket_nesting_level == 0:
self.begin('INDENT')
self.produce('NEWLINE', '')
string_states = {
"'": 'SQ_STRING',
'"': 'DQ_STRING',
"'''": 'TSQ_STRING',
'"""': 'TDQ_STRING'
}
def begin_string_action(self, text):
if text[:1] in string_prefixes:
text = text[1:]
self.begin(self.string_states[text])
self.produce('BEGIN_STRING')
def end_string_action(self, text):
self.begin('')
self.produce('END_STRING')
def unclosed_string_action(self, text):
self.end_string_action(text)
self.error("Unclosed string literal")
def indentation_action(self, text):
self.begin('')
# Indentation within brackets should be ignored.
#if self.bracket_nesting_level > 0:
# return
# Check that tabs and spaces are being used consistently.
if text:
c = text[0]
#print "Scanner.indentation_action: indent with", repr(c) ###
if self.indentation_char is None:
self.indentation_char = c
#print "Scanner.indentation_action: setting indent_char to", repr(c)
else:
if self.indentation_char != c:
self.error("Mixed use of tabs and spaces")
if text.replace(c, "") != "":
self.error("Mixed use of tabs and spaces")
# Figure out how many indents/dedents to do
current_level = self.current_level()
new_level = len(text)
#print "Changing indent level from", current_level, "to", new_level ###
if new_level == current_level:
return
elif new_level > current_level:
#print "...pushing level", new_level ###
self.indentation_stack.append(new_level)
self.produce('INDENT', '')
else:
while new_level < self.current_level():
#print "...popping level", self.indentation_stack[-1] ###
self.indentation_stack.pop()
self.produce('DEDENT', '')
#print "...current level now", self.current_level() ###
if new_level != self.current_level():
self.error("Inconsistent indentation")
def eof_action(self, text):
while len(self.indentation_stack) > 1:
self.produce('DEDENT', '')
self.indentation_stack.pop()
self.produce('EOF', '')
def next(self):
try:
sy, systring = self.read()
except UnrecognizedInput:
self.error("Unrecognized character")
if sy == 'IDENT' and systring in self.resword_dict:
sy = systring
self.sy = sy
self.systring = systring
if debug_scanner:
_, line, col = self.position()
if not self.systring or self.sy == self.systring:
t = self.sy
else:
t = "%s %s" % (self.sy, self.systring)
print("--- %3d %2d %s" % (line, col, t))
def put_back(self, sy, systring):
self.unread(self.sy, self.systring)
self.sy = sy
self.systring = systring
def unread(self, token, value):
# This method should be added to Plex
self.queue.insert(0, (token, value))
def add_type_name(self, name):
self.type_names[name] = 1
def looking_at_type_name(self):
return self.sy == 'IDENT' and self.systring in self.type_names
def error(self, message, pos = None):
if pos is None:
pos = self.position()
if self.sy == 'INDENT':
error(pos, "Possible inconsistent indentation")
raise error(pos, message)
def expect(self, what, message = None):
if self.sy == what:
self.next()
else:
self.expected(what, message)
def expect_keyword(self, what, message = None):
if self.sy == 'IDENT' and self.systring == what:
self.next()
else:
self.expected(what, message)
def expected(self, what, message):
if message:
self.error(message)
else:
self.error("Expected '%s'" % what)
def expect_indent(self):
self.expect('INDENT',
"Expected an increase in indentation level")
def expect_dedent(self):
self.expect('DEDENT',
"Expected a decrease in indentation level")
def expect_newline(self, message = "Expected a newline"):
# Expect either a newline or end of file
if self.sy != 'EOF':
self.expect('NEWLINE', message)
#
# Pyrex - Symbol Table
#
import re
import bisect
from Errors import warning, error, InternalError
import Options
import Naming
import PyrexTypes
from PyrexTypes import *
import TypeSlots
from TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
import ControlFlow
import __builtin__
identifier_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*$")
class Entry:
# A symbol table entry in a Scope or ModuleNamespace.
#
# name string Python name of entity
# cname string C name of entity
# type PyrexType Type of entity
# doc string Doc string
# init string Initial value
# visibility 'private' or 'public' or 'extern'
# is_builtin boolean Is an entry in the Python builtins dict
# is_cglobal boolean Is a C global variable
# is_pyglobal boolean Is a Python module-level variable
# or class attribute during
# class construction
# is_member boolean Is an assigned class member
# is_variable boolean Is a variable
# is_cfunction boolean Is a C function
# is_cmethod boolean Is a C method of an extension type
# is_unbound_cmethod boolean Is an unbound C method of an extension type
# is_type boolean Is a type definition
# is_const boolean Is a constant
# is_property boolean Is a property of an extension type:
# doc_cname string or None C const holding the docstring
# getter_cname string C func for getting property
# setter_cname string C func for setting or deleting property
# is_self_arg boolean Is the "self" arg of an exttype method
# is_arg boolean Is the arg of a method
# is_local boolean Is a local variable
# is_readonly boolean Can't be assigned to
# func_cname string C func implementing Python func
# pos position Source position where declared
# namespace_cname string If is_pyglobal, the C variable
# holding its home namespace
# pymethdef_cname string PyMethodDef structure
# signature Signature Arg & return types for Python func
# init_to_none boolean True if initial value should be None
# as_variable Entry Alternative interpretation of extension
# type name or builtin C function as a variable
# xdecref_cleanup boolean Use Py_XDECREF for error cleanup
# in_cinclude boolean Suppress C declaration code
# enum_values [Entry] For enum types, list of values
# qualified_name string "modname.funcname" or "modname.classname"
# or "modname.classname.funcname"
# is_declared_generic boolean Is declared as PyObject * even though its
# type is an extension type
# as_module None Module scope, if a cimported module
# is_inherited boolean Is an inherited attribute of an extension type
# #interned_cname string C name of interned name string
# pystring_cname string C name of Python version of string literal
# is_interned boolean For string const entries, value is interned
# used boolean
# is_special boolean Is a special method or property accessor
# of an extension type
# defined_in_pxd boolean Is defined in a .pxd file (not just declared)
# api boolean Generate C API for C class or function
# utility_code string Utility code needed when this entry is used
borrowed = 0
init = ""
visibility = 'private'
is_builtin = 0
is_cglobal = 0
is_pyglobal = 0
is_member = 0
is_variable = 0
is_cfunction = 0
is_cmethod = 0
is_unbound_cmethod = 0
is_type = 0
is_const = 0
is_property = 0
doc_cname = None
getter_cname = None
setter_cname = None
is_self_arg = 0
is_arg = 0
is_local = 0
is_declared_generic = 0
is_readonly = 0
func_cname = None
doc = None
init_to_none = 0
as_variable = None
xdecref_cleanup = 0
in_cinclude = 0
as_module = None
is_inherited = 0
#interned_cname = None
pystring_cname = None
is_interned = 0
used = 0
is_special = 0
defined_in_pxd = 0
api = 0
utility_code = None
is_overridable = 0
def __init__(self, name, cname, type, pos = None, init = None):
self.name = name
self.cname = cname
self.type = type
self.pos = pos
self.init = init
class Scope:
# name string Unqualified name
# outer_scope Scope or None Enclosing scope
# entries {string : Entry} Python name to entry, non-types
# const_entries [Entry] Constant entries
# type_entries [Entry] Struct/union/enum/typedef/exttype entries
# sue_entries [Entry] Struct/union/enum entries
# arg_entries [Entry] Function argument entries
# var_entries [Entry] User-defined variable entries
# pyfunc_entries [Entry] Python function entries
# cfunc_entries [Entry] C function entries
# c_class_entries [Entry] All extension type entries
# temp_entries [Entry] Temporary variable entries
# free_temp_entries [Entry] Temp variables currently unused
# temp_counter integer Counter for naming temp vars
# cname_to_entry {string : Entry} Temp cname to entry mapping
# num_to_entry {int : Entry} Numeric constant value to entry mapping
# pow_function_used boolean The C pow() function is used
# return_type PyrexType or None Return type of function owning scope
# is_py_class_scope boolean Is a Python class scope
# is_c_class_scope boolean Is an extension type scope
# scope_prefix string Disambiguator for C names
# in_cinclude boolean Suppress C declaration code
# qualified_name string "modname" or "modname.classname"
# pystring_entries [Entry] String const entries newly used as
# Python strings in this scope
# control_flow ControlFlow Used for keeping track of environment state
is_py_class_scope = 0
is_c_class_scope = 0
is_module_scope = 0
scope_prefix = ""
in_cinclude = 0
def __init__(self, name, outer_scope, parent_scope):
# The outer_scope is the next scope in the lookup chain.
# The parent_scope is used to derive the qualified name of this scope.
self.name = name
self.outer_scope = outer_scope
self.parent_scope = parent_scope
mangled_name = "%d%s_" % (len(name), name)
qual_scope = self.qualifying_scope()
if qual_scope:
self.qualified_name = qual_scope.qualify_name(name)
self.scope_prefix = qual_scope.scope_prefix + mangled_name
else:
self.qualified_name = name
self.scope_prefix = mangled_name
self.entries = {}
self.const_entries = []
self.type_entries = []
self.sue_entries = []
self.arg_entries = []
self.var_entries = []
self.pyfunc_entries = []
self.cfunc_entries = []
self.c_class_entries = []
self.defined_c_classes = []
self.imported_c_classes = {}
self.temp_entries = []
self.free_temp_entries = []
#self.pending_temp_entries = [] # TEMPORARY
self.temp_counter = 1
self.cname_to_entry = {}
self.pow_function_used = 0
self.string_to_entry = {}
self.num_to_entry = {}
self.obj_to_entry = {}
self.pystring_entries = []
self.control_flow = ControlFlow.LinearControlFlow()
def start_branching(self, pos):
self.control_flow = self.control_flow.start_branch(pos)
def next_branch(self, pos):
self.control_flow = self.control_flow.next_branch(pos)
def finish_branching(self, pos):
self.control_flow = self.control_flow.finish_branch(pos)
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
def intern(self, name):
return self.global_scope().intern(name)
def qualifying_scope(self):
return self.parent_scope
def mangle(self, prefix, name = None):
if name:
return "%s%s%s" % (prefix, self.scope_prefix, name)
else:
return self.parent_scope.mangle(prefix, self.name)
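# Mangling sketch (the actual prefixes come from Naming and are assumed
# here): a module "spam" containing a scope "eggs" builds scope_prefix
# "4spam_4eggs_" from the "%d%s_" pieces, so mangle(prefix, "x") yields
# prefix + "4spam_4eggs_" + "x".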
def mangle_internal(self, name):
# Mangle an internal name so as not to clash with any
# user-defined name in this scope.
prefix = "%s%s_" % (Naming.pyrex_prefix, name)
return self.mangle(prefix)
#return self.parent_scope.mangle(prefix, self.name)
def global_scope(self):
# Return the module-level scope containing this scope.
return self.outer_scope.global_scope()
def builtin_scope(self):
# Return the builtin scope containing this scope.
return self.outer_scope.builtin_scope()
def declare(self, name, cname, type, pos):
# Create new entry, and add to dictionary if
# name is not None. Reports a warning if already
# declared.
if not self.in_cinclude and cname and re.match("^_[_A-Z]+$", cname):
# See http://www.gnu.org/software/libc/manual/html_node/Reserved-Names.html#Reserved-Names
warning(pos, "'%s' is a reserved name in C." % cname, -1)
dict = self.entries
if name and dict.has_key(name):
warning(pos, "'%s' redeclared " % name, 0)
entry = Entry(name, cname, type, pos = pos)
entry.in_cinclude = self.in_cinclude
if name:
entry.qualified_name = self.qualify_name(name)
dict[name] = entry
entry.scope = self
return entry
def qualify_name(self, name):
return "%s.%s" % (self.qualified_name, name)
def declare_const(self, name, type, value, pos, cname = None):
# Add an entry for a named constant.
if not cname:
if self.in_cinclude:
cname = name
else:
cname = self.mangle(Naming.enum_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_const = 1
entry.value = value
return entry
def declare_type(self, name, type, pos,
cname = None, visibility = 'private', defining = 1):
# Add an entry for a type definition.
if not cname:
cname = name
entry = self.declare(name, cname, type, pos)
entry.visibility = visibility
entry.is_type = 1
if defining:
self.type_entries.append(entry)
return entry
def declare_typedef(self, name, base_type, pos, cname = None,
visibility = 'private'):
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
type = PyrexTypes.CTypedefType(cname, base_type)
entry = self.declare_type(name, type, pos, cname, visibility)
type.qualified_name = entry.qualified_name
return entry
def declare_struct_or_union(self, name, kind, scope,
typedef_flag, pos, cname = None, visibility = 'private'):
# Add an entry for a struct or union definition.
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
entry = self.lookup_here(name)
if not entry:
type = CStructOrUnionType(name, kind, scope, typedef_flag, cname)
entry = self.declare_type(name, type, pos, cname,
visibility = visibility, defining = scope is not None)
self.sue_entries.append(entry)
else:
if not (entry.is_type and entry.type.is_struct_or_union):
warning(pos, "'%s' redeclared " % name, 0)
elif scope and entry.type.scope:
warning(pos, "'%s' already defined (ignoring second definition)" % name, 0)
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
self.check_previous_visibility(entry, visibility, pos)
if scope:
entry.type.scope = scope
self.type_entries.append(entry)
if not scope and not entry.type.scope:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
return entry
def check_previous_typedef_flag(self, entry, typedef_flag, pos):
if typedef_flag != entry.type.typedef_flag:
error(pos, "'%s' previously declared using '%s'" % (
entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
def check_previous_visibility(self, entry, visibility, pos):
if entry.visibility != visibility:
error(pos, "'%s' previously declared as '%s'" % (
entry.name, entry.visibility))
def declare_enum(self, name, pos, cname, typedef_flag,
visibility = 'private'):
if name:
if not cname:
if self.in_cinclude or visibility == 'public':
cname = name
else:
cname = self.mangle(Naming.type_prefix, name)
type = CEnumType(name, cname, typedef_flag)
else:
type = PyrexTypes.c_anon_enum_type
entry = self.declare_type(name, type, pos, cname = cname,
visibility = visibility)
entry.enum_values = []
self.sue_entries.append(entry)
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a variable.
if not cname:
if visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.var_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
entry.visibility = visibility
self.control_flow.set_state((), (name, 'initalized'), False)
return entry
def declare_builtin(self, name, pos):
return self.outer_scope.declare_builtin(name, pos)
def declare_pyfunction(self, name, pos):
# Add an entry for a Python function.
entry = self.declare_var(name, py_object_type, pos)
entry.signature = pyfunction_signature
self.pyfunc_entries.append(entry)
return entry
def register_pyfunction(self, entry):
self.pyfunc_entries.append(entry)
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
# Add an entry for a C function.
entry = self.lookup_here(name)
if entry:
if visibility != 'private' and visibility != entry.visibility:
warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
if not entry.type.same_as(type):
warning(pos, "Function signature does not match previous declaration", 1)
entry.type = type
else:
if not cname:
if api or visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.func_prefix, name)
entry = self.add_cfunction(name, type, pos, cname, visibility)
entry.func_cname = cname
if in_pxd and visibility != 'extern':
entry.defined_in_pxd = 1
if api:
entry.api = 1
if not defining and not in_pxd and visibility != 'extern':
error(pos, "Non-extern C function declared but not defined")
return entry
def add_cfunction(self, name, type, pos, cname, visibility):
# Add a C function entry without giving it a func_cname.
entry = self.declare(name, cname, type, pos)
entry.is_cfunction = 1
entry.visibility = visibility
self.cfunc_entries.append(entry)
return entry
def find(self, name, pos):
# Look up name, report error if not found.
entry = self.lookup(name)
if entry:
return entry
else:
error(pos, "'%s' is not declared" % name)
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
return (self.lookup_here(name)
or (self.outer_scope and self.outer_scope.lookup(name))
or None)
def lookup_here(self, name):
# Look up in this scope only, return None if not found.
return self.entries.get(name, None)
def lookup_target(self, name):
# Look up name in this scope only. Declare as Python
# variable if not found.
entry = self.lookup_here(name)
if not entry:
entry = self.declare_var(name, py_object_type, None)
return entry
def add_string_const(self, value):
# Add an entry for a string constant.
cname = self.new_const_cname()
entry = Entry("", cname, c_char_array_type, init = value)
entry.used = 1
self.const_entries.append(entry)
return entry
def get_string_const(self, value):
# Get entry for string constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.string_to_entry.get(value)
if not entry:
entry = self.add_string_const(value)
genv.string_to_entry[value] = entry
return entry
def add_py_string(self, entry):
# If not already done, allocate a C name for a Python version of
# a string literal, and add it to the list of Python strings to
# be created at module init time. If the string resembles a
# Python identifier, it will be interned.
if not entry.pystring_cname:
value = entry.init
if identifier_pattern.match(value) and isinstance(value, str):
entry.pystring_cname = self.intern(value)
entry.is_interned = 1
else:
entry.pystring_cname = entry.cname + "p"
self.pystring_entries.append(entry)
self.global_scope().all_pystring_entries.append(entry)
def add_py_num(self, value):
# Add an entry for an int constant.
cname = "%s%s" % (Naming.interned_num_prefix, value)
cname = cname.replace('-', 'neg_').replace('.','_')
entry = Entry("", cname, py_object_type, init = value)
entry.used = 1
entry.is_interned = 1
self.const_entries.append(entry)
self.interned_nums.append(entry)
return entry
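# Example of the generated constant cname (Naming.interned_num_prefix is
# assumed to contain no '-' or '.'): the value -3.5 becomes
# "<prefix>neg_3_5" after the substitutions above.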
def get_py_num(self, value):
# Get entry for int constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.num_to_entry.get(value)
if not entry:
entry = genv.add_py_num(value)
genv.num_to_entry[value] = entry
genv.pynum_entries.append(entry)
return entry
def add_py_obj(self, obj, c_prefix=''):
obj.check_const()
cname = self.new_const_cname(c_prefix)
entry = Entry("", cname, py_object_type, init = value)
entry.used = 1
entry.is_interned = 1
self.const_entries.append(entry)
self.interned_objs.append(entry)
return entry
def get_py_obj(self, obj, c_prefix=''):
# Get entry for a generic constant. Returns an existing
# one if possible, otherwise creates a new one.
genv = self.global_scope()
entry = genv.obj_to_entry.get(obj)
if not entry:
entry = genv.add_py_obj(obj, c_prefix)
genv.obj_to_entry[obj] = entry
return entry
def new_const_cname(self):
# Create a new globally-unique name for a constant.
return self.global_scope().new_const_cname()
def allocate_temp(self, type):
# Allocate a temporary variable of the given type from the
# free list if available, otherwise create a new one.
# Returns the cname of the variable.
for entry in self.free_temp_entries:
if entry.type == type:
self.free_temp_entries.remove(entry)
return entry.cname
n = self.temp_counter
self.temp_counter = n + 1
cname = "%s%d" % (Naming.pyrex_prefix, n)
entry = Entry("", cname, type)
entry.used = 1
if type.is_pyobject or type == c_py_ssize_t_type:
entry.init = "0"
self.cname_to_entry[entry.cname] = entry
self.temp_entries.append(entry)
return entry.cname
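# Naming sketch: with Naming.pyrex_prefix as the prefix, fresh temps are
# "<prefix>1", "<prefix>2", ...; a temp released via release_temp() is
# handed out again for the same type before the counter advances.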
def allocate_temp_pyobject(self):
# Allocate a temporary PyObject variable.
return self.allocate_temp(py_object_type)
def release_temp(self, cname):
# Release a temporary variable for re-use.
if not cname: # can happen when type of an expr is void
return
entry = self.cname_to_entry[cname]
if entry in self.free_temp_entries:
raise InternalError("Temporary variable %s released more than once"
% cname)
self.free_temp_entries.append(entry)
def temps_in_use(self):
# Return a new list of temp entries currently in use.
return [entry for entry in self.temp_entries
if entry not in self.free_temp_entries]
#def recycle_pending_temps(self):
# # Obsolete
# pass
def use_utility_code(self, new_code):
self.global_scope().use_utility_code(new_code)
def generate_library_function_declarations(self, code):
# Generate extern decls for C library funcs used.
#if self.pow_function_used:
# code.putln("%s double pow(double, double);" % Naming.extern_c_macro)
pass
def defines_any(self, names):
# Test whether any of the given names are
# defined in this scope.
for name in names:
if name in self.entries:
return 1
return 0
class PreImportScope(Scope):
def __init__(self):
Scope.__init__(self, Options.pre_import, None, None)
def declare_builtin(self, name, pos):
entry = self.declare(name, name, py_object_type, pos)
entry.is_variable = True
entry.is_pyglobal = True
entry.namespace_cname = Naming.preimport_cname
return entry
class BuiltinScope(Scope):
# The builtin namespace.
def __init__(self):
if Options.pre_import is None:
Scope.__init__(self, "__builtin__", None, None)
else:
Scope.__init__(self, "__builtin__", PreImportScope(), None)
for name, definition in self.builtin_entries.iteritems():
cname, type = definition
self.declare_var(name, type, None, cname)
def declare_builtin(self, name, pos):
if not hasattr(__builtin__, name):
if self.outer_scope is not None:
return self.outer_scope.declare_builtin(name, pos)
else:
error(pos, "undeclared name not builtin: %s"%name)
def declare_builtin_cfunction(self, name, type, cname, python_equiv = None,
utility_code = None):
# If python_equiv == "*", the Python equivalent has the same name
# as the entry, otherwise it has the name specified by python_equiv.
entry = self.declare_cfunction(name, type, None, cname)
entry.utility_code = utility_code
if python_equiv:
if python_equiv == "*":
python_equiv = name
var_entry = Entry(python_equiv, python_equiv, py_object_type)
var_entry.is_variable = 1
var_entry.is_builtin = 1
entry.as_variable = var_entry
return entry
def builtin_scope(self):
return self
builtin_entries = {
"int": ["((PyObject*)&PyInt_Type)", py_object_type],
"long": ["((PyObject*)&PyLong_Type)", py_object_type],
"float": ["((PyObject*)&PyFloat_Type)", py_object_type],
"str": ["((PyObject*)&PyString_Type)", py_object_type],
"unicode":["((PyObject*)&PyUnicode_Type)", py_object_type],
"tuple": ["((PyObject*)&PyTuple_Type)", py_object_type],
"list": ["((PyObject*)&PyList_Type)", py_object_type],
"dict": ["((PyObject*)&PyDict_Type)", py_object_type],
"set": ["((PyObject*)&PySet_Type)", py_object_type],
"frozenset": ["((PyObject*)&PyFrozenSet_Type)", py_object_type],
"type": ["((PyObject*)&PyType_Type)", py_object_type],
"slice": ["((PyObject*)&PySlice_Type)", py_object_type],
"file": ["((PyObject*)&PyFile_Type)", py_object_type],
"None": ["Py_None", py_object_type],
"False": ["Py_False", py_object_type],
"True": ["Py_True", py_object_type],
}
class ModuleScope(Scope):
# module_name string Python name of the module
# module_cname string C name of Python module object
# #module_dict_cname string C name of module dict object
# method_table_cname string C name of method table
# doc string Module doc string
# doc_cname string C name of module doc string
# const_counter integer Counter for naming constants
# utility_code_used [string] Utility code to be included
# default_entries [Entry] Function argument default entries
# python_include_files [string] Standard Python headers to be included
# include_files [string] Other C headers to be included
# string_to_entry {string : Entry} Map string const to entry
# context Context
# parent_module Scope Parent in the import namespace
# module_entries {string : Entry} For cimport statements
# type_names {string : 1} Set of type names (used during parsing)
# pxd_file_loaded boolean Corresponding .pxd file has been processed
# cimported_modules [ModuleScope] Modules imported with cimport
# intern_map {string : string} Mapping from Python names to interned strs
# interned_names [string] Interned names pending generation of declarations
# interned_nums [int/long] Interned numeric constants
# all_pystring_entries [Entry] Python string consts from all scopes
# types_imported {PyrexType : 1} Set of types for which import code generated
is_module_scope = 1
def __init__(self, name, parent_module, context):
self.parent_module = parent_module
outer_scope = context.find_submodule("__builtin__")
Scope.__init__(self, name, outer_scope, parent_module)
self.module_name = name
self.context = context
self.module_cname = Naming.module_cname
self.module_dict_cname = Naming.moddict_cname
self.method_table_cname = Naming.methtable_cname
self.doc = ""
self.doc_cname = Naming.moddoc_cname
self.const_counter = 1
self.utility_code_used = []
self.default_entries = []
self.module_entries = {}
self.python_include_files = ["Python.h", "structmember.h"]
self.include_files = []
self.type_names = {}
self.pxd_file_loaded = 0
self.cimported_modules = []
self.intern_map = {}
self.interned_names = []
self.interned_nums = []
self.interned_objs = []
self.all_pystring_entries = []
self.types_imported = {}
self.pynum_entries = []
self.has_extern_class = 0
self.cached_builtins = []
self.undeclared_cached_builtins = []
def qualifying_scope(self):
return self.parent_module
def global_scope(self):
return self
def declare_builtin(self, name, pos):
if not hasattr(__builtin__, name):
if self.outer_scope is not None:
return self.outer_scope.declare_builtin(name, pos)
else:
error(pos, "undeclared name not builtin: %s"%name)
if Options.cache_builtins:
for entry in self.cached_builtins:
if entry.name == name:
return entry
entry = self.declare(None, None, py_object_type, pos)
if Options.cache_builtins:
entry.is_builtin = 1
entry.is_const = 1
entry.name = name
entry.cname = Naming.builtin_prefix + name
self.cached_builtins.append(entry)
self.undeclared_cached_builtins.append(entry)
else:
entry.is_builtin = 1
return entry
def intern(self, name):
intern_map = self.intern_map
cname = intern_map.get(name)
if not cname:
cname = Naming.interned_prefix + name
intern_map[name] = cname
self.interned_names.append(name)
return cname
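# For example, interning "foo" twice returns the same cname
# (Naming.interned_prefix + "foo") and records the name only once in
# interned_names.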
def find_module(self, module_name, pos):
# Find a module in the import namespace, interpreting
# relative imports relative to this module's parent.
# Finds and parses the module's .pxd file if the module
# has not been referenced before.
return self.global_scope().context.find_module(
module_name, relative_to = self.parent_module, pos = pos)
def find_submodule(self, name):
# Find and return scope for a submodule of this module,
# creating a new empty one if necessary. Doesn't parse .pxd.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
parent_module = self, context = self.context)
self.module_entries[name] = scope
return scope
def lookup_submodule(self, name):
# Return scope for submodule of this module, or None.
return self.module_entries.get(name, None)
def add_include_file(self, filename):
if filename not in self.python_include_files \
and filename not in self.include_files:
self.include_files.append(filename)
def add_imported_module(self, scope):
if scope not in self.cimported_modules:
self.cimported_modules.append(scope)
def add_imported_entry(self, name, entry, pos):
if entry not in self.entries:
self.entries[name] = entry
else:
warning(pos, "'%s' redeclared " % name, 0)
def declare_module(self, name, scope, pos):
# Declare a cimported module. This is represented as a
# Python module-level variable entry with a module
# scope attached to it. Reports an error and returns
# None if previously declared as something else.
entry = self.lookup_here(name)
if entry:
if entry.is_pyglobal and entry.as_module is scope:
return entry # Already declared as the same module
if not (entry.is_pyglobal and not entry.as_module):
# SAGE -- I put this here so Pyrex
# cimports work across directories.
# Currently it tries to multiply define
# every module appearing in an import list.
# It shouldn't be an error for a module
# name to appear again, and indeed the generated
# code compiles fine.
return entry
warning(pos, "'%s' redeclared " % name, 0)
return None
else:
entry = self.declare_var(name, py_object_type, pos)
entry.as_module = scope
self.cimported_modules.append(scope)
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a global variable. If it is a Python
# object type, and not declared with cdef, it will live
# in the module dictionary, otherwise it will be a C
# global variable.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
if visibility not in ('private', 'public', 'extern'):
error(pos, "Module-level variable cannot be declared %s" % visibility)
if not is_cdef:
if not (type.is_pyobject and not type.is_extension_type):
raise InternalError(
"Non-cdef global variable is not a generic Python object")
entry.is_pyglobal = 1
entry.namespace_cname = self.module_cname
#if Options.intern_names:
# entry.interned_cname = self.intern(name)
else:
entry.is_cglobal = 1
self.var_entries.append(entry)
return entry
def declare_global(self, name, pos):
entry = self.lookup_here(name)
if not entry:
self.declare_var(name, py_object_type, pos)
def add_default_value(self, type):
# Add an entry for holding a function argument
# default value.
cname = self.new_const_cname()
entry = Entry("", cname, type)
self.default_entries.append(entry)
return entry
def new_const_cname(self, prefix=''):
# Create a new globally-unique name for a constant.
n = self.const_counter
self.const_counter = n + 1
return "%s%s_%d" % (Naming.const_prefix, prefix, n)
def use_utility_code(self, new_code):
# Add string to list of utility code to be included,
# if not already there (tested using 'is').
for old_code in self.utility_code_used:
if old_code is new_code:
return
self.utility_code_used.append(new_code)
def declare_c_class(self, name, pos, defining, implementing,
module_name, base_type, objstruct_cname, typeobj_cname,
visibility, typedef_flag, api):
#
# Look for previous declaration as a type
#
entry = self.lookup_here(name)
if entry:
type = entry.type
if not (entry.is_type and type.is_extension_type):
entry = None # Will cause an error when we redeclare it
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
if base_type != type.base_type:
error(pos, "Base type does not match previous declaration")
#
# Make a new entry if needed
#
if not entry:
type = PyExtensionType(name, typedef_flag, base_type)
type.pos = pos
if visibility == 'extern':
type.module_name = module_name
else:
type.module_name = self.qualified_name
type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
entry = self.declare_type(name, type, pos, visibility = visibility,
defining = 0)
if objstruct_cname:
type.objstruct_cname = objstruct_cname
elif not entry.in_cinclude:
type.objstruct_cname = self.mangle(Naming.objstruct_prefix, name)
else:
error(entry.pos,
"Object name required for 'public' or 'extern' C class")
self.attach_var_entry_to_c_class(entry)
self.c_class_entries.append(entry)
#
# Check for re-definition and create scope if needed
#
if not type.scope:
if defining or implementing:
scope = CClassScope(name = name, outer_scope = self,
visibility = visibility)
if base_type:
scope.declare_inherited_c_attributes(base_type.scope)
type.set_scope(scope)
self.type_entries.append(entry)
else:
self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
else:
if defining and type.scope.defined:
error(pos, "C class '%s' already defined" % name)
elif implementing and type.scope.implemented:
error(pos, "C class '%s' already implemented" % name)
#
# Fill in options, checking for compatibility with any previous declaration
#
if defining:
entry.defined_in_pxd = 1
if implementing: # So that filenames in runtime exceptions refer to
entry.pos = pos # the .pyx file and not the .pxd file
if visibility != 'private' and entry.visibility != visibility:
error(pos, "Class '%s' previously declared as '%s'"
% (name, entry.visibility))
if api:
entry.api = 1
if objstruct_cname:
if type.objstruct_cname and type.objstruct_cname != objstruct_cname:
error(pos, "Object struct name differs from previous declaration")
type.objstruct_cname = objstruct_cname
if typeobj_cname:
if type.typeobj_cname and type.typeobj_cname != typeobj_cname:
error(pos, "Type object name differs from previous declaration")
type.typeobj_cname = typeobj_cname
#
# Return new or existing entry
#
return entry
def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
if typedef_flag and not self.in_cinclude:
error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
def allocate_vtable_names(self, entry):
# If extension type has a vtable, allocate vtable struct and
# slot names for it.
type = entry.type
if type.base_type and type.base_type.vtabslot_cname:
#print "...allocating vtabslot_cname because base type has one" ###
type.vtabslot_cname = "%s.%s" % (
Naming.obj_base_cname, type.base_type.vtabslot_cname)
elif type.scope and type.scope.cfunc_entries:
#print "...allocating vtabslot_cname because there are C methods" ###
type.vtabslot_cname = Naming.vtabslot_cname
if type.vtabslot_cname:
#print "...allocating other vtable related cnames" ###
type.vtabstruct_cname = self.mangle(Naming.vtabstruct_prefix, entry.name)
type.vtabptr_cname = self.mangle(Naming.vtabptr_prefix, entry.name)
def check_c_classes(self):
# Performs post-analysis checking and finishing up of extension types
# being implemented in this module. This is called only for the main
# .pyx file scope, not for cimported .pxd scopes.
#
# Checks all extension types declared in this scope to
# make sure that:
#
# * The extension type is implemented
# * All required object and type names have been specified or generated
# * All non-inherited C methods are implemented
#
# Also allocates a name for the vtable if needed.
#
debug_check_c_classes = 0
if debug_check_c_classes:
print("Scope.check_c_classes: checking scope " + self.qualified_name)
for entry in self.c_class_entries:
if debug_check_c_classes:
print("...entry %s %s" % (entry.name, entry))
print("......type = " + entry.type)
print("......visibility = " + entry.visibility)
type = entry.type
name = entry.name
visibility = entry.visibility
# Check defined
if not type.scope:
error(entry.pos, "C class '%s' is declared but not defined" % name)
# Generate typeobj_cname
if visibility != 'extern' and not type.typeobj_cname:
type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
## Generate typeptr_cname
#type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
# Check C methods defined
if type.scope:
for method_entry in type.scope.cfunc_entries:
if not method_entry.is_inherited and not method_entry.func_cname:
error(method_entry.pos, "C method '%s' is declared but not defined" %
method_entry.name)
# Allocate vtable name if necessary
if type.vtabslot_cname:
#print "ModuleScope.check_c_classes: allocating vtable cname for", self ###
type.vtable_cname = self.mangle(Naming.vtable_prefix, entry.name)
def attach_var_entry_to_c_class(self, entry):
# The name of an extension class has to serve as both a type
# name and a variable name holding the type object. It is
# represented in the symbol table by a type entry with a
# variable entry attached to it. For the variable entry,
# we use a read-only C global variable whose name is an
# expression that refers to the type object.
var_entry = Entry(name = entry.name,
type = py_object_type,
pos = entry.pos,
cname = "((PyObject*)%s)" % entry.type.typeptr_cname)
var_entry.is_variable = 1
var_entry.is_cglobal = 1
var_entry.is_readonly = 1
entry.as_variable = var_entry
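# Illustrative note (not part of the original source): for an extension class
# "Spam" defined in a module "mod", the variable entry created above ends up
# with a cname along the lines of "((PyObject*)__pyx_ptype_3mod_Spam)",
# assuming the usual typeptr prefix and name mangling; the exact spelling is
# whatever Naming.typeptr_prefix and ModuleScope.mangle produce.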
class LocalScope(Scope):
def __init__(self, name, outer_scope):
Scope.__init__(self, name, outer_scope, outer_scope)
def mangle(self, prefix, name):
return prefix + name
def declare_arg(self, name, type, pos):
# Add an entry for an argument of a function.
cname = self.mangle(Naming.var_prefix, name)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
if type.is_pyobject:
entry.init = "0"
entry.is_arg = 1
#entry.borrowed = 1 # Not using borrowed arg refs for now
self.arg_entries.append(entry)
self.control_flow.set_state((), (name, 'source'), 'arg')
return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a local variable.
if visibility in ('public', 'readonly'):
error(pos, "Local variable cannot be declared %s" % visibility)
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
if type.is_pyobject and not Options.init_local_none:
entry.init = "0"
entry.init_to_none = type.is_pyobject and Options.init_local_none
entry.is_local = 1
self.var_entries.append(entry)
return entry
def declare_global(self, name, pos):
# Pull entry from global scope into local scope.
if self.lookup_here(name):
warning(pos, "'%s' redeclared ", 0)
else:
entry = self.global_scope().lookup_target(name)
self.entries[name] = entry
class StructOrUnionScope(Scope):
# Namespace of a C struct or union.
def __init__(self):
Scope.__init__(self, "?", None, None)
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0, allow_pyobject = 0):
# Add an entry for an attribute.
if not cname:
cname = name
if type.is_cfunction:
type = CPtrType(type)
entry = self.declare(name, cname, type, pos)
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_pyobject and not allow_pyobject:
error(pos,
"C struct/union member cannot be a Python object")
if visibility != 'private':
error(pos,
"C struct/union member cannot be declared %s" % visibility)
return entry
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
self.declare_var(name, type, pos, cname, visibility)
class ClassScope(Scope):
# Abstract base class for namespace of
# Python class or extension type.
#
# class_name string Pyrex name of the class
# scope_prefix string Additional prefix for names
# declared in the class
# doc string or None Doc string
def __init__(self, name, outer_scope):
Scope.__init__(self, name, outer_scope, outer_scope)
self.class_name = name
self.doc = None
def add_string_const(self, value):
return self.outer_scope.add_string_const(value)
def lookup(self, name):
if name == "classmethod":
# We don't want to use the builtin classmethod here 'cause it won't do the
# right thing in this scope (as the class members aren't functions yet).
# Don't want to add a cfunction to this scope 'cause that would mess with
# the type definition, so we just return the right entry.
self.use_utility_code(classmethod_utility_code)
entry = Entry("classmethod",
"__Pyx_Method_ClassMethod",
CFuncType(py_object_type, [CFuncTypeArg("", py_object_type, None)], 0, 0))
entry.is_cfunction = 1
return entry
else:
return Scope.lookup(self, name)
class PyClassScope(ClassScope):
# Namespace of a Python class.
#
# class_dict_cname string C variable holding class dict
# class_obj_cname string C variable holding class object
is_py_class_scope = 1
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
entry.is_pyglobal = 1
entry.namespace_cname = self.class_obj_cname
#if Options.intern_names:
# entry.interned_cname = self.intern(name)
return entry
def allocate_temp(self, type):
return self.outer_scope.allocate_temp(type)
def release_temp(self, cname):
self.outer_scope.release_temp(cname)
#def recycle_pending_temps(self):
# self.outer_scope.recycle_pending_temps()
def add_default_value(self, type):
return self.outer_scope.add_default_value(type)
class CClassScope(ClassScope):
# Namespace of an extension type.
#
# parent_type CClassType
# #typeobj_cname string or None
# #objstruct_cname string
# method_table_cname string
# member_table_cname string
# getset_table_cname string
# has_pyobject_attrs boolean Any PyObject attributes?
# public_attr_entries [Entry] public/readonly attrs
# property_entries [Entry]
# defined boolean Defined in .pxd file
# implemented boolean Defined in .pyx file
# inherited_var_entries [Entry] Adapted var entries from base class
is_c_class_scope = 1
def __init__(self, name, outer_scope, visibility):
ClassScope.__init__(self, name, outer_scope)
if visibility != 'extern':
self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
self.has_pyobject_attrs = 0
self.public_attr_entries = []
self.property_entries = []
self.inherited_var_entries = []
self.defined = 0
self.implemented = 0
def needs_gc(self):
# If the type or any of its base types have Python-valued
# C attributes, then it needs to participate in GC.
return self.has_pyobject_attrs or \
(self.parent_type.base_type and \
self.parent_type.base_type.scope.needs_gc())
def declare_var(self, name, type, pos,
cname = None, visibility = 'private', is_cdef = 0):
if is_cdef:
# Add an entry for an attribute.
if self.defined:
error(pos,
"C attributes cannot be added in implementation part of"
" extension type")
if get_special_method_signature(name):
error(pos,
"The name '%s' is reserved for a special method."
% name)
if not cname:
cname = name
entry = self.declare(name, cname, type, pos)
entry.visibility = visibility
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_pyobject:
self.has_pyobject_attrs = 1
if visibility not in ('private', 'public', 'readonly'):
error(pos,
"Attribute of extension type cannot be declared %s" % visibility)
if visibility in ('public', 'readonly'):
if type.pymemberdef_typecode:
self.public_attr_entries.append(entry)
if name == "__weakref__":
error(pos, "Special attribute __weakref__ cannot be exposed to Python")
else:
error(pos,
"C attribute of type '%s' cannot be accessed from Python" % type)
if visibility == 'public' and type.is_extension_type:
error(pos,
"Non-generic Python attribute cannot be exposed for writing from Python")
return entry
else:
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname, visibility, is_cdef)
entry.is_member = 1
entry.is_pyglobal = 1 # xxx: is_pyglobal changes behaviour in so many places that
# I keep it in for now. is_member should be enough
# later on
entry.namespace_cname = "(PyObject *)%s" % self.parent_type.typeptr_cname
if Options.intern_names:
entry.interned_cname = self.intern(name)
return entry
def declare_pyfunction(self, name, pos):
# Add an entry for a method.
if name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'):
error(pos, "Special method %s must be implemented via __richcmp__" % name)
if name == "__new__":
warning(pos, "__new__ method of extension type will change semantics "
"in a future version of Pyrex and Cython. Use __cinit__ instead.")
name = "__cinit__"
entry = self.declare_var(name, py_object_type, pos)
special_sig = get_special_method_signature(name)
if special_sig:
# Special methods get put in the method table with a particular
# signature declared in advance.
entry.signature = special_sig
entry.is_special = 1
else:
entry.signature = pymethod_signature
entry.is_special = 0
self.pyfunc_entries.append(entry)
return entry
def lookup_here(self, name):
if name == "__new__":
name = "__cinit__"
return ClassScope.lookup_here(self, name)
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
if get_special_method_signature(name):
error(pos, "Special methods must be declared with 'def', not 'cdef'")
args = type.args
if not args:
error(pos, "C method has no self argument")
elif not args[0].type.same_as(self.parent_type):
error(pos, "Self argument of C method does not match parent type")
entry = self.lookup_here(name)
if entry:
if not entry.is_cfunction:
warning(pos, "'%s' redeclared " % name, 0)
else:
if defining and entry.func_cname:
error(pos, "'%s' already defined" % name)
#print "CClassScope.declare_cfunction: checking signature" ###
if type.same_c_signature_as(entry.type, as_cmethod = 1):
pass
elif type.compatible_signature_with(entry.type, as_cmethod = 1):
if type.optional_arg_count and not type.original_sig.optional_arg_count:
# Need to put a wrapper taking no optional arguments
# into the method table.
wrapper_func_cname = self.mangle(Naming.func_prefix, name) + Naming.no_opt_args
wrapper_func_name = name + Naming.no_opt_args
if entry.type.optional_arg_count:
old_entry = self.lookup_here(wrapper_func_name)
old_entry.func_cname = wrapper_func_cname
else:
entry.func_cname = wrapper_func_cname
entry.name = wrapper_func_name
entry = self.add_cfunction(name, type, pos, cname or name, visibility)
defining = 1
entry.type = type
# if type.narrower_c_signature_than(entry.type, as_cmethod = 1):
# entry.type = type
else:
error(pos, "Signature not compatible with previous declaration")
else:
if self.defined:
error(pos,
"C method '%s' not previously declared in definition part of"
" extension type" % name)
entry = self.add_cfunction(name, type, pos, cname or name, visibility)
if defining:
entry.func_cname = self.mangle(Naming.func_prefix, name)
return entry
def add_cfunction(self, name, type, pos, cname, visibility):
# Add a cfunction entry without giving it a func_cname.
entry = ClassScope.add_cfunction(self, name, type, pos, cname, visibility)
entry.is_cmethod = 1
return entry
def declare_property(self, name, doc, pos):
entry = self.declare(name, name, py_object_type, pos)
entry.is_property = 1
entry.doc = doc
entry.scope = PropertyScope(name,
outer_scope = self.global_scope(), parent_scope = self)
entry.scope.parent_type = self.parent_type
self.property_entries.append(entry)
return entry
def declare_inherited_c_attributes(self, base_scope):
# Declare entries for all the C attributes of an
# inherited type, with cnames modified appropriately
# to work with this type.
def adapt(cname):
return "%s.%s" % (Naming.obj_base_cname, base_entry.cname)
for base_entry in \
base_scope.inherited_var_entries + base_scope.var_entries:
entry = self.declare(base_entry.name, adapt(base_entry.cname),
base_entry.type, None)
entry.is_variable = 1
self.inherited_var_entries.append(entry)
for base_entry in base_scope.cfunc_entries:
entry = self.add_cfunction(base_entry.name, base_entry.type, None,
adapt(base_entry.cname), base_entry.visibility)
entry.is_inherited = 1
def allocate_temp(self, type):
return Scope.allocate_temp(self.global_scope(), type)
def release_temp(self, cname):
return Scope.release_temp(self.global_scope(), cname)
class PropertyScope(Scope):
# Scope holding the __get__, __set__ and __del__ methods for
# a property of an extension type.
#
# parent_type PyExtensionType The type to which the property belongs
def declare_pyfunction(self, name, pos):
# Add an entry for a method.
signature = get_property_accessor_signature(name)
if signature:
entry = self.declare(name, name, py_object_type, pos)
entry.is_special = 1
entry.signature = signature
return entry
else:
error(pos, "Only __get__, __set__ and __del__ methods allowed "
"in a property declaration")
return None
# Should this go elsewhere (and then get imported)?
#------------------------------------------------------------------------------------
classmethod_utility_code = [
"""
#include "descrobject.h"
static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
""","""
static PyObject* __Pyx_Method_ClassMethod(PyObject *method) {
/* It appears that PyMethodDescr_Type is not anywhere exposed in the Python/C API */
/* if (!PyObject_TypeCheck(method, &PyMethodDescr_Type)) { */
if (strcmp(method->ob_type->tp_name, "method_descriptor") == 0) { /* cdef classes */
PyMethodDescrObject *descr = (PyMethodDescrObject *)method;
return PyDescr_NewClassMethod(descr->d_type, descr->d_method);
}
else if (PyMethod_Check(method)) { /* python classes */
return PyClassMethod_New(PyMethod_GET_FUNCTION(method));
}
PyErr_Format(PyExc_TypeError, "Class-level classmethod() can only be called on a method_descriptor or instance method.");
return NULL;
}
"""
]
#
# Tree transform framework
#
import Nodes
import ExprNodes
class Transform(object):
# parent_stack [Node] A stack providing information about where in the tree
# we currently are. Nodes here should be considered
# read-only.
# Transforms for the parse tree should usually extend this class for convenience.
# The caller of a transform will first call initialize and then process_node on
# the root node; the rest are utility functions and conventions.
# Transformation usually happens by recursively filtering through the stream.
# process_node is always expected to return a node; it is ok to simply
# return the input node untouched. Returning None will remove the node from the
# parent. (A small usage sketch follows the TransformSet class below.)
def __init__(self):
self.parent_stack = []
def initialize(self, phase, **options):
pass
def process_children(self, node):
"""For all children of node, either process_list (if isinstance(node, list))
or process_node (otherwise) is called."""
if node == None: return
self.parent_stack.append(node)
for childacc in node.get_child_accessors():
child = childacc.get()
if isinstance(child, list):
newchild = self.process_list(child, childacc.name())
if not isinstance(newchild, list): raise Exception("Cannot replace list with non-list!")
else:
newchild = self.process_node(child, childacc.name())
if newchild is not None and not isinstance(newchild, Nodes.Node):
raise Exception("Cannot replace Node with non-Node!")
childacc.set(newchild)
self.parent_stack.pop()
def process_list(self, l, name):
"""Calls process_node on all the items in l, using the name one gets when appending
[idx] to the name. Each item in l is transformed in-place by the item process_node
returns, then l is returned."""
# Comment: If moving to a copying strategy, it might makes sense to return a
# new list instead.
for idx in xrange(len(l)):
l[idx] = self.process_node(l[idx], "%s[%d]" % (name, idx))
return l
def process_node(self, node, name):
"""Override this method to process nodes. name specifies which kind of relation the
parent has with child. This method should always return the node which the parent
should use for this relation, which can either be the same node, None to remove
the node, or a different node."""
raise InternalError("Not implemented")
class PrintTree(Transform):
"""Prints a representation of the tree to standard output.
Subclass and override repr_of to provide more information
about nodes. """
def __init__(self):
Transform.__init__(self)
self._indent = ""
def indent(self):
self._indent += " "
def unindent(self):
self._indent = self._indent[:-2]
def initialize(self, phase, **options):
print("Parse tree dump at phase '%s'" % phase)
# Don't do anything about process_list; the default gives
# nice-looking name[idx] nodes which will visually appear
# under the parent-node, not displaying the list itself in
# the hierarchy.
def process_node(self, node, name):
print("%s- %s: %s" % (self._indent, name, self.repr_of(node)))
self.indent()
self.process_children(node)
self.unindent()
return node
def repr_of(self, node):
if node is None:
return "(none)"
else:
result = node.__class__.__name__
if isinstance(node, ExprNodes.ExprNode):
t = node.type
result += "(type=%s)" % repr(t)
return result
PHASES = [
'before_analyse_function', # run in FuncDefNode.generate_function_definitions
'after_analyse_function' # run in FuncDefNode.generate_function_definitions
]
class TransformSet(dict):
def __init__(self):
for name in PHASES:
self[name] = []
def run(self, name, node, **options):
assert name in self
for transform in self[name]:
transform.initialize(phase=name, **options)
transform.process_node(node, "(root)")
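# Illustrative sketch (not part of the original source): how a caller could wire
# transforms into the phases listed above and run them on a parse-tree root.
# 'root_node' is a placeholder for the node handed in by the compiler.
def _example_run_transforms(root_node):
    transforms = TransformSet()
    transforms['before_analyse_function'].append(PrintTree())
    # run() calls initialize(phase=...) on each registered transform and then
    # process_node() on the root, passing any extra keyword options through.
    transforms.run('before_analyse_function', root_node)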
#
# Pyrex - Tables describing slots in the type object
# and associated know-how.
#
import Naming
import PyrexTypes
import sys
class Signature:
# Method slot signature descriptor.
#
# has_dummy_arg boolean
# has_generic_args boolean
# fixed_arg_format string
# ret_format string
# error_value string
#
# The formats are strings made up of the following
# characters:
#
# 'O' Python object
# 'T' Python object of the type of 'self'
# 'v' void
# 'p' void *
# 'P' void **
# 'i' int
# 'b' bint (boolean int)
# 'I' int *
# 'l' long
# 'Z' Py_ssize_t
# 's' char *
# 'S' char **
# 'r' int used only to signal exception
# '-' dummy 'self' argument (not used)
# '*' rest of args passed as generic Python
# arg tuple and kw dict (must be last
# char in format string)
format_map = {
'O': PyrexTypes.py_object_type,
'v': PyrexTypes.c_void_type,
'p': PyrexTypes.c_void_ptr_type,
'P': PyrexTypes.c_void_ptr_ptr_type,
'i': PyrexTypes.c_int_type,
'b': PyrexTypes.c_bint_type,
'I': PyrexTypes.c_int_ptr_type,
'l': PyrexTypes.c_long_type,
'Z': PyrexTypes.c_py_ssize_t_type,
's': PyrexTypes.c_char_ptr_type,
'S': PyrexTypes.c_char_ptr_ptr_type,
'r': PyrexTypes.c_returncode_type,
# 'T', '-' and '*' are handled otherwise
# and are not looked up in here
}
error_value_map = {
'O': "NULL",
'i': "-1",
'b': "-1",
'l': "-1",
'r': "-1",
'Z': "-1",
}
def __init__(self, arg_format, ret_format):
self.has_dummy_arg = 0
self.has_generic_args = 0
if arg_format[:1] == '-':
self.has_dummy_arg = 1
arg_format = arg_format[1:]
if arg_format[-1:] == '*':
self.has_generic_args = 1
arg_format = arg_format[:-1]
self.fixed_arg_format = arg_format
self.ret_format = ret_format
self.error_value = self.error_value_map.get(ret_format, None)
def num_fixed_args(self):
return len(self.fixed_arg_format)
def is_self_arg(self, i):
return self.fixed_arg_format[i] == 'T'
def fixed_arg_type(self, i):
return self.format_map[self.fixed_arg_format[i]]
def return_type(self):
return self.format_map[self.ret_format]
def exception_value(self):
return self.error_value_map.get(self.ret_format)
def function_type(self):
# Construct a C function type descriptor for this signature
args = []
for i in xrange(self.num_fixed_args()):
arg_type = self.fixed_arg_type(i)
args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
ret_type = self.return_type()
exc_value = self.exception_value()
return PyrexTypes.CFuncType(ret_type, args, exception_value = exc_value)
def method_flags(self):
if self.ret_format == "O":
full_args = self.fixed_arg_format
if self.has_dummy_arg:
full_args = "O" + full_args
if full_args in ["O", "T"]:
if self.has_generic_args:
return [method_varargs, method_keywords]
else:
return [method_noargs]
elif full_args in ["OO", "TO"] and not self.has_generic_args:
return [method_onearg]
return None
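# Illustrative sketch (not part of the original source) of how the format
# strings above are interpreted; the names below all refer to definitions in
# this module.
def _example_signature_usage():
    # "OO"/"O": two generic object arguments, object result (cf. binaryfunc below).
    sig = Signature("OO", "O")
    assert sig.num_fixed_args() == 2
    assert sig.return_type() is PyrexTypes.py_object_type
    assert sig.error_value == "NULL"        # error return for an object result
    func_type = sig.function_type()         # a PyrexTypes.CFuncType descriptor
    # "T*"/"O": 'self' plus generic *args/**kwds, as used for ordinary methods.
    meth = Signature("T*", "O")
    assert meth.has_generic_args and meth.is_self_arg(0)
    return func_type, meth.method_flags()   # [METH_VARARGS, METH_KEYWORDS] flags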
class SlotDescriptor:
# Abstract base class for type slot descriptors.
#
# slot_name string Member name of the slot in the type object
# is_initialised_dynamically Is initialised by code in the module init function
# flag Py_TPFLAGS_XXX value indicating presence of slot
def __init__(self, slot_name, dynamic = 0, flag = None):
self.slot_name = slot_name
self.is_initialised_dynamically = dynamic
self.flag = flag
def generate(self, scope, code):
if self.is_initialised_dynamically:
value = 0
else:
value = self.slot_code(scope)
flag = self.flag
if flag:
code.putln("#if Py_TPFLAGS_DEFAULT & %s" % flag)
code.putln("%s, /*%s*/" % (value, self.slot_name))
if flag:
code.putln("#endif")
# Some C implementations have trouble statically
# initialising a global with a pointer to an extern
# function, so we initialise some of the type slots
# in the module init function instead.
def generate_dynamic_init_code(self, scope, code):
if self.is_initialised_dynamically:
value = self.slot_code(scope)
if value != "0":
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
value
)
)
class FixedSlot(SlotDescriptor):
# Descriptor for a type slot with a fixed value.
#
# value string
def __init__(self, slot_name, value):
SlotDescriptor.__init__(self, slot_name)
self.value = value
def slot_code(self, scope):
return self.value
class EmptySlot(FixedSlot):
# Descriptor for a type slot whose value is always 0.
def __init__(self, slot_name):
FixedSlot.__init__(self, slot_name, "0")
class MethodSlot(SlotDescriptor):
# Type slot descriptor for a user-definable method.
#
# signature Signature
# method_name string The __xxx__ name of the method
# default string or None Default value of the slot
def __init__(self, signature, slot_name, method_name, default = None, flag = None):
SlotDescriptor.__init__(self, slot_name, flag = flag)
self.signature = signature
self.slot_name = slot_name
self.method_name = method_name
self.default = default
method_name_to_slot[method_name] = self
def slot_code(self, scope):
entry = scope.lookup_here(self.method_name)
if entry:
return entry.func_cname
else:
return "0"
class InternalMethodSlot(SlotDescriptor):
# Type slot descriptor for a method which is always
# synthesized by Cython.
#
# slot_name string Member name of the slot in the type object
def __init__(self, slot_name):
SlotDescriptor.__init__(self, slot_name)
def slot_code(self, scope):
return scope.mangle_internal(self.slot_name)
class GCDependentSlot(InternalMethodSlot):
# Descriptor for a slot whose value depends on whether
# the type participates in GC.
def __init__(self, slot_name):
InternalMethodSlot.__init__(self, slot_name)
def slot_code(self, scope):
if not scope.needs_gc():
return "0"
if not scope.has_pyobject_attrs:
# if the type does not have object attributes, it can
# delegate GC methods to its parent - iff the parent
# functions are defined in the same module
parent_type_scope = scope.parent_type.base_type.scope
if scope.parent_scope is parent_type_scope.parent_scope:
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return self.slot_code(parent_type_scope)
return InternalMethodSlot.slot_code(self, scope)
class ConstructorSlot(InternalMethodSlot):
# Descriptor for tp_new and tp_dealloc.
def __init__(self, slot_name, method):
InternalMethodSlot.__init__(self, slot_name)
self.method = method
def slot_code(self, scope):
if scope.parent_type.base_type \
and not scope.has_pyobject_attrs \
and not scope.lookup_here(self.method):
# if the type does not have object attributes and does not define the
# corresponding method itself, it can delegate this slot (tp_new/tp_dealloc)
# to its parent - iff the parent functions are defined in the same module
parent_type_scope = scope.parent_type.base_type.scope
if scope.parent_scope is parent_type_scope.parent_scope:
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return self.slot_code(parent_type_scope)
return InternalMethodSlot.slot_code(self, scope)
class SyntheticSlot(InternalMethodSlot):
# Type slot descriptor for a synthesized method which
# dispatches to one or more user-defined methods depending
# on its arguments. If none of the relevant methods are
# defined, the method will not be synthesized and an
# alternative default value will be placed in the type
# slot.
def __init__(self, slot_name, user_methods, default_value):
InternalMethodSlot.__init__(self, slot_name)
self.user_methods = user_methods
self.default_value = default_value
def slot_code(self, scope):
if scope.defines_any(self.user_methods):
return InternalMethodSlot.slot_code(self, scope)
else:
return self.default_value
class TypeFlagsSlot(SlotDescriptor):
# Descriptor for the type flags slot.
def slot_code(self, scope):
value = "Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_BASETYPE"
if scope.needs_gc():
value += "|Py_TPFLAGS_HAVE_GC"
return value
class DocStringSlot(SlotDescriptor):
# Descriptor for the docstring slot.
def slot_code(self, scope):
if scope.doc is not None:
return '"%s"' % scope.doc
else:
return "0"
class SuiteSlot(SlotDescriptor):
# Descriptor for a substructure of the type object.
#
# sub_slots [SlotDescriptor]
def __init__(self, sub_slots, slot_type, slot_name):
SlotDescriptor.__init__(self, slot_name)
self.sub_slots = sub_slots
self.slot_type = slot_type
substructures.append(self)
def substructure_cname(self, scope):
return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name)
def slot_code(self, scope):
return "&%s" % self.substructure_cname(scope)
def generate_substructure(self, scope, code):
code.putln("")
code.putln(
"static %s %s = {" % (
self.slot_type,
self.substructure_cname(scope)))
for slot in self.sub_slots:
slot.generate(scope, code)
code.putln("};")
substructures = [] # List of all SuiteSlot instances
class MethodTableSlot(SlotDescriptor):
# Slot descriptor for the method table.
def slot_code(self, scope):
return scope.method_table_cname
class MemberTableSlot(SlotDescriptor):
# Slot descriptor for the table of Python-accessible attributes.
def slot_code(self, scope):
if scope.public_attr_entries:
return scope.member_table_cname
else:
return "0"
class GetSetSlot(SlotDescriptor):
# Slot descriptor for the table of attribute get & set methods.
def slot_code(self, scope):
if scope.property_entries:
return scope.getset_table_cname
else:
return "0"
class BaseClassSlot(SlotDescriptor):
# Slot descriptor for the base class slot.
def __init__(self, name):
SlotDescriptor.__init__(self, name, dynamic = 1)
def generate_dynamic_init_code(self, scope, code):
base_type = scope.parent_type.base_type
if base_type:
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
base_type.typeptr_cname))
# The following dictionary maps __xxx__ method names to slot descriptors.
method_name_to_slot = {}
## The following slots are (or could be) initialised with an
## extern function pointer.
#
#slots_initialised_from_extern = (
# "tp_free",
#)
#------------------------------------------------------------------------------------------
#
# Utility functions for accessing slot table data structures
#
#------------------------------------------------------------------------------------------
def get_special_method_signature(name):
# Given a method name, if it is a special method,
# return its signature, else return None.
slot = method_name_to_slot.get(name)
if slot:
return slot.signature
else:
return None
def get_property_accessor_signature(name):
# Return signature of accessor for an extension type
# property, else None.
return property_accessor_signatures.get(name)
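# Illustrative sketch (not part of the original source): once the descriptor
# tables below have been built, special method names resolve to the signatures
# used for their slots.
def _example_signature_lookup():
    assert get_special_method_signature("__add__") is binaryfunc
    assert get_special_method_signature("plain_method") is None
    assert get_property_accessor_signature("__get__").ret_format == "O"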
def get_base_slot_function(scope, slot):
# Returns the function implementing this slot in the baseclass.
# This is useful for enabling the compiler to optimize calls
# that recursively climb the class hierarchy.
base_type = scope.parent_type.base_type
if scope.parent_scope is base_type.scope.parent_scope:
parent_slot = slot.slot_code(base_type.scope)
if parent_slot != '0':
entry = scope.parent_scope.lookup_here(scope.parent_type.base_type.name)
if entry.visibility != 'extern':
return parent_slot
return None
#------------------------------------------------------------------------------------------
#
# Signatures for generic Python functions and methods.
#
#------------------------------------------------------------------------------------------
pyfunction_signature = Signature("-*", "O")
pymethod_signature = Signature("T*", "O")
#------------------------------------------------------------------------------------------
#
# Signatures for simple Python functions.
#
#------------------------------------------------------------------------------------------
pyfunction_noargs = Signature("-", "O")
pyfunction_onearg = Signature("-O", "O")
#------------------------------------------------------------------------------------------
#
# Signatures for the various kinds of function that
# can appear in the type object and its substructures.
#
#------------------------------------------------------------------------------------------
unaryfunc = Signature("T", "O") # typedef PyObject * (*unaryfunc)(PyObject *);
binaryfunc = Signature("OO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
ibinaryfunc = Signature("TO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
ternaryfunc = Signature("OOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
iternaryfunc = Signature("TOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
callfunc = Signature("T*", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
inquiry = Signature("T", "i") # typedef int (*inquiry)(PyObject *);
lenfunc = Signature("T", "Z") # typedef Py_ssize_t (*lenfunc)(PyObject *);
# typedef int (*coercion)(PyObject **, PyObject **);
intargfunc = Signature("Ti", "O") # typedef PyObject *(*intargfunc)(PyObject *, int);
ssizeargfunc = Signature("TZ", "O") # typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
intintargfunc = Signature("Tii", "O") # typedef PyObject *(*intintargfunc)(PyObject *, int, int);
ssizessizeargfunc = Signature("TZZ", "O") # typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t);
intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
ssizeobjargproc = Signature("TZO", 'r') # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
intintobjargproc = Signature("TiiO", 'r') # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
ssizessizeobjargproc = Signature("TZZO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
intintargproc = Signature("Tii", 'r')
ssizessizeargproc = Signature("TZZ", 'r')
objargfunc = Signature("TO", "O")
objobjargproc = Signature("TOO", 'r') # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *);
getreadbufferproc = Signature("TiP", 'i') # typedef int (*getreadbufferproc)(PyObject *, int, void **);
getwritebufferproc = Signature("TiP", 'i') # typedef int (*getwritebufferproc)(PyObject *, int, void **);
getsegcountproc = Signature("TI", 'i') # typedef int (*getsegcountproc)(PyObject *, int *);
getcharbufferproc = Signature("TiS", 'i') # typedef int (*getcharbufferproc)(PyObject *, int, const char **);
readbufferproc = Signature("TZP", "Z") # typedef Py_ssize_t (*readbufferproc)(PyObject *, Py_ssize_t, void **);
writebufferproc = Signature("TZP", "Z") # typedef Py_ssize_t (*writebufferproc)(PyObject *, Py_ssize_t, void **);
segcountproc = Signature("TZ", "Z") # typedef Py_ssize_t (*segcountproc)(PyObject *, Py_ssize_t *);
charbufferproc = Signature("TZS", "Z") # typedef Py_ssize_t (*charbufferproc)(PyObject *, Py_ssize_t, char **);
objargproc = Signature("TO", 'r') # typedef int (*objobjproc)(PyObject *, PyObject *);
# typedef int (*visitproc)(PyObject *, void *);
# typedef int (*traverseproc)(PyObject *, visitproc, void *);
destructor = Signature("T", "v") # typedef void (*destructor)(PyObject *);
# printfunc = Signature("TFi", 'r') # typedef int (*printfunc)(PyObject *, FILE *, int);
# typedef PyObject *(*getattrfunc)(PyObject *, char *);
getattrofunc = Signature("TO", "O") # typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
# typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
setattrofunc = Signature("TOO", 'r') # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
delattrofunc = Signature("TO", 'r')
cmpfunc = Signature("TO", "i") # typedef int (*cmpfunc)(PyObject *, PyObject *);
reprfunc = Signature("T", "O") # typedef PyObject *(*reprfunc)(PyObject *);
hashfunc = Signature("T", "l") # typedef long (*hashfunc)(PyObject *);
# typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
richcmpfunc = Signature("OOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
getiterfunc = Signature("T", "O") # typedef PyObject *(*getiterfunc) (PyObject *);
iternextfunc = Signature("T", "O") # typedef PyObject *(*iternextfunc) (PyObject *);
descrgetfunc = Signature("TOO", "O") # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
descrsetfunc = Signature("TOO", 'r') # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
descrdelfunc = Signature("TO", 'r')
initproc = Signature("T*", 'r') # typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
# typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
# typedef PyObject *(*allocfunc)(struct _typeobject *, int);
#------------------------------------------------------------------------------------------
#
# Signatures for accessor methods of properties.
#
#------------------------------------------------------------------------------------------
property_accessor_signatures = {
'__get__': Signature("T", "O"),
'__set__': Signature("TO", 'r'),
'__del__': Signature("T", 'r')
}
#------------------------------------------------------------------------------------------
#
# Descriptor tables for the slots of the various type object
# substructures, in the order they appear in the structure.
#
#------------------------------------------------------------------------------------------
PyNumberMethods = (
MethodSlot(binaryfunc, "nb_add", "__add__"),
MethodSlot(binaryfunc, "nb_subtract", "__sub__"),
MethodSlot(binaryfunc, "nb_multiply", "__mul__"),
MethodSlot(binaryfunc, "nb_divide", "__div__"),
MethodSlot(binaryfunc, "nb_remainder", "__mod__"),
MethodSlot(binaryfunc, "nb_divmod", "__divmod__"),
MethodSlot(ternaryfunc, "nb_power", "__pow__"),
MethodSlot(unaryfunc, "nb_negative", "__neg__"),
MethodSlot(unaryfunc, "nb_positive", "__pos__"),
MethodSlot(unaryfunc, "nb_absolute", "__abs__"),
MethodSlot(inquiry, "nb_nonzero", "__nonzero__"),
MethodSlot(unaryfunc, "nb_invert", "__invert__"),
MethodSlot(binaryfunc, "nb_lshift", "__lshift__"),
MethodSlot(binaryfunc, "nb_rshift", "__rshift__"),
MethodSlot(binaryfunc, "nb_and", "__and__"),
MethodSlot(binaryfunc, "nb_xor", "__xor__"),
MethodSlot(binaryfunc, "nb_or", "__or__"),
EmptySlot("nb_coerce"),
MethodSlot(unaryfunc, "nb_int", "__int__"),
MethodSlot(unaryfunc, "nb_long", "__long__"),
MethodSlot(unaryfunc, "nb_float", "__float__"),
MethodSlot(unaryfunc, "nb_oct", "__oct__"),
MethodSlot(unaryfunc, "nb_hex", "__hex__"),
# Added in release 2.0
MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"),
MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"),
MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__"),
MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
MethodSlot(ternaryfunc, "nb_inplace_power", "__ipow__"), # NOT iternaryfunc!!!
MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"),
MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"),
# Added in release 2.2
# The following require the Py_TPFLAGS_HAVE_CLASS flag
MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"),
MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"),
MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"),
MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"),
# Added in release 2.5
MethodSlot(unaryfunc, "nb_index", "__index__", flag = "Py_TPFLAGS_HAVE_INDEX")
)
PySequenceMethods = (
MethodSlot(lenfunc, "sq_length", "__len__"),
EmptySlot("sq_concat"), # nb_add used instead
EmptySlot("sq_repeat"), # nb_multiply used instead
SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead
MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"),
EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
MethodSlot(cmpfunc, "sq_contains", "__contains__"),
EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
)
PyMappingMethods = (
MethodSlot(lenfunc, "mp_length", "__len__"),
MethodSlot(objargfunc, "mp_subscript", "__getitem__"),
SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"),
)
PyBufferProcs = (
MethodSlot(getreadbufferproc, "bf_getreadbuffer", "__getreadbuffer__"),
MethodSlot(getwritebufferproc, "bf_getwritebuffer", "__getwritebuffer__"),
MethodSlot(getsegcountproc, "bf_getsegcount", "__getsegcount__"),
MethodSlot(getcharbufferproc, "bf_getcharbuffer", "__getcharbuffer__"),
)
#------------------------------------------------------------------------------------------
#
# The main slot table. This table contains descriptors for all the
# top-level type slots, beginning with tp_dealloc, in the order they
# appear in the type object.
#
#------------------------------------------------------------------------------------------
slot_table = (
ConstructorSlot("tp_dealloc", '__dealloc__'),
EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"),
EmptySlot("tp_getattr"),
EmptySlot("tp_setattr"),
MethodSlot(cmpfunc, "tp_compare", "__cmp__"),
MethodSlot(reprfunc, "tp_repr", "__repr__"),
SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"),
SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"),
MethodSlot(hashfunc, "tp_hash", "__hash__"),
MethodSlot(callfunc, "tp_call", "__call__"),
MethodSlot(reprfunc, "tp_str", "__str__"),
SyntheticSlot("tp_getattro", ["__getattr__"], "0"), #"PyObject_GenericGetAttr"),
SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),
SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),
TypeFlagsSlot("tp_flags"),
DocStringSlot("tp_doc"),
GCDependentSlot("tp_traverse"),
GCDependentSlot("tp_clear"),
# Later -- synthesize a method to split into separate ops?
MethodSlot(richcmpfunc, "tp_richcompare", "__richcmp__"),
EmptySlot("tp_weaklistoffset"),
MethodSlot(getiterfunc, "tp_iter", "__iter__"),
MethodSlot(iternextfunc, "tp_iternext", "__next__"),
MethodTableSlot("tp_methods"),
MemberTableSlot("tp_members"),
GetSetSlot("tp_getset"),
BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
EmptySlot("tp_dict"),
SyntheticSlot("tp_descr_get", ["__get__"], "0"),
SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"),
EmptySlot("tp_dictoffset"),
MethodSlot(initproc, "tp_init", "__init__"),
EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
InternalMethodSlot("tp_new"),
EmptySlot("tp_free"),
EmptySlot("tp_is_gc"),
EmptySlot("tp_bases"),
EmptySlot("tp_mro"),
EmptySlot("tp_cache"),
EmptySlot("tp_subclasses"),
EmptySlot("tp_weaklist"),
)
#------------------------------------------------------------------------------------------
#
# Descriptors for special methods which don't appear directly
# in the type object or its substructures. These methods are
# called from slot functions synthesized by Cython.
#
#------------------------------------------------------------------------------------------
MethodSlot(initproc, "", "__cinit__")
MethodSlot(destructor, "", "__dealloc__")
MethodSlot(objobjargproc, "", "__setitem__")
MethodSlot(objargproc, "", "__delitem__")
MethodSlot(ssizessizeobjargproc, "", "__setslice__")
MethodSlot(ssizessizeargproc, "", "__delslice__")
MethodSlot(getattrofunc, "", "__getattr__")
MethodSlot(setattrofunc, "", "__setattr__")
MethodSlot(delattrofunc, "", "__delattr__")
MethodSlot(descrgetfunc, "", "__get__")
MethodSlot(descrsetfunc, "", "__set__")
MethodSlot(descrdelfunc, "", "__delete__")
# Method flags for python-exposed methods.
method_noargs = "METH_NOARGS"
method_onearg = "METH_O"
method_varargs = "METH_VARARGS"
method_keywords = "METH_KEYWORDS"
method_coexist = "METH_COEXIST"
version = '0.9.6.13'
###############################################
#
# Odds and ends for debugging
#
###############################################
def print_call_chain(*args):
import sys
print(" ".join(map(str, args)))
f = sys._getframe(1)
while f:
name = f.f_code.co_name
s = f.f_locals.get('self', None)
if s:
c = getattr(s, "__class__", None)
if c:
name = "%s.%s" % (c.__name__, name)
print("Called from: %s %s" % (name, f.f_lineno))
f = f.f_back
print("-" * 70)
# July 2002, Graham Fawcett
#
# this hack was inspired by the way Thomas Heller got py2exe
# to appear as a distutil command
#
# we replace distutils.command.build_ext with our own version
# and keep the old one under the module name _build_ext,
# so that *our* build_ext can make use of it.
from build_ext import build_ext
# from extension import Extension
"""Cython.Distutils.build_ext
Implements a version of the Distutils 'build_ext' command, for
building Cython extension modules."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id:$"
import sys, os, string, re
from types import *
from distutils.core import Command
from distutils.errors import *
from distutils.sysconfig import customize_compiler, get_python_version
from distutils.dep_util import newer, newer_group
from distutils import log
from distutils.dir_util import mkpath
try:
from Cython.Compiler.Main \
import CompilationOptions, \
default_options as pyrex_default_options, \
compile as cython_compile
from Cython.Compiler.Errors import PyrexError
except ImportError, e:
PyrexError = None
from distutils.command import build_ext as _build_ext
extension_name_re = _build_ext.extension_name_re
show_compilers = _build_ext.show_compilers
class build_ext(_build_ext.build_ext):
description = "build C/C++ and Cython extensions (compile/link to build directory)"
sep_by = _build_ext.build_ext.sep_by
user_options = _build_ext.build_ext.user_options
boolean_options = _build_ext.build_ext.boolean_options
help_options = _build_ext.build_ext.help_options
# Add the pyrex specific data.
user_options.extend([
('pyrex-cplus', None,
"generate C++ source files"),
('pyrex-create-listing', None,
"write errors to a listing file"),
('pyrex-include-dirs=', None,
"path to the Cython include files" + sep_by),
('pyrex-c-in-temp', None,
"put generated C files in temp directory"),
('pyrex-gen-pxi', None,
"generate .pxi file for public declarations"),
])
boolean_options.extend([
'pyrex-cplus', 'pyrex-create-listing', 'pyrex-c-in-temp'
])
def initialize_options(self):
_build_ext.build_ext.initialize_options(self)
self.pyrex_cplus = 0
self.pyrex_create_listing = 0
self.pyrex_include_dirs = None
self.pyrex_c_in_temp = 0
self.pyrex_gen_pxi = 0
def finalize_options (self):
_build_ext.build_ext.finalize_options(self)
if self.pyrex_include_dirs is None:
self.pyrex_include_dirs = []
elif type(self.pyrex_include_dirs) is StringType:
self.pyrex_include_dirs = \
string.split(self.pyrex_include_dirs, os.pathsep)
# finalize_options ()
def build_extensions(self):
# First, sanity-check the 'extensions' list
self.check_extensions_list(self.extensions)
for ext in self.extensions:
ext.sources = self.cython_sources(ext.sources, ext)
self.build_extension(ext)
def cython_sources(self, sources, extension):
"""
Walk the list of source files in 'sources', looking for Cython
source (.pyx) files. Run Cython on all that are found, and return
a modified 'sources' list with Cython source files replaced by the
generated C (or C++) files.
"""
if PyrexError == None:
raise DistutilsPlatformError, \
("Cython does not appear to be installed "
"on platform '%s'") % os.name
new_sources = []
pyrex_sources = []
pyrex_targets = {}
# Setup create_listing and cplus from the extension options if
# Cython.Distutils.extension.Extension is used, otherwise just
# use what was parsed from the command-line or the configuration file.
# cplus will also be set to true if extension.language is equal to
# 'C++' or 'c++'.
#try:
# create_listing = self.pyrex_create_listing or \
# extension.pyrex_create_listing
# cplus = self.pyrex_cplus or \
# extension.pyrex_cplus or \
# (extension.language != None and \
# extension.language.lower() == 'c++')
#except AttributeError:
# create_listing = self.pyrex_create_listing
# cplus = self.pyrex_cplus or \
# (extension.language != None and \
# extension.language.lower() == 'c++')
create_listing = self.pyrex_create_listing or \
getattr(extension, 'pyrex_create_listing', 0)
cplus = self.pyrex_cplus or getattr(extension, 'pyrex_cplus', 0) or \
(extension.language and extension.language.lower() == 'c++')
pyrex_gen_pxi = self.pyrex_gen_pxi or getattr(extension, 'pyrex_gen_pxi', 0)
# Set up the include_path for the Cython compiler:
# 1. Start with the command line option.
# 2. Add in any (unique) paths from the extension
# pyrex_include_dirs (if Cython.Distutils.extension is used).
# 3. Add in any (unique) paths from the extension include_dirs
includes = self.pyrex_include_dirs
try:
for i in extension.pyrex_include_dirs:
if not i in includes:
includes.append(i)
except AttributeError:
pass
for i in extension.include_dirs:
if not i in includes:
includes.append(i)
# Set the target_ext to '.c'. Cython will change this to '.cpp' if
# needed.
if cplus:
target_ext = '.cpp'
else:
target_ext = '.c'
# Decide whether to drop the generated C files into the temp dir
# or the source tree.
if not self.inplace and (self.pyrex_c_in_temp
or getattr(extension, 'pyrex_c_in_temp', 0)):
target_dir = os.path.join(self.build_temp, "pyrex")
else:
target_dir = None
newest_dependency = None
for source in sources:
(base, ext) = os.path.splitext(os.path.basename(source))
if ext == ".pyx": # Cython source file
output_dir = target_dir or os.path.dirname(source)
new_sources.append(os.path.join(output_dir, base + target_ext))
pyrex_sources.append(source)
pyrex_targets[source] = new_sources[-1]
elif ext == '.pxi' or ext == '.pxd':
if newest_dependency is None \
or newer(source, newest_dependency):
newest_dependency = source
else:
new_sources.append(source)
if not pyrex_sources:
return new_sources
module_name = extension.name
for source in pyrex_sources:
target = pyrex_targets[source]
rebuild = self.force or newer(source, target)
if not rebuild and newest_dependency is not None:
rebuild = newer(newest_dependency, target)
if rebuild:
log.info("cythoning %s to %s", source, target)
self.mkpath(os.path.dirname(target))
options = CompilationOptions(pyrex_default_options,
use_listing_file = create_listing,
include_path = includes,
output_file = target,
cplus = cplus,
generate_pxi = pyrex_gen_pxi)
result = cython_compile(source, options=options,
full_module_name=module_name)
return new_sources
# cython_sources ()
# class build_ext
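# Minimal setup.py sketch (illustrative only, not part of this module) showing
# the intended use of this command class; 'hello.pyx' is a hypothetical file.
#
#   from distutils.core import setup
#   from distutils.extension import Extension
#   from Cython.Distutils import build_ext
#
#   setup(name = 'hello',
#         cmdclass = {'build_ext': build_ext},
#         ext_modules = [Extension('hello', ['hello.pyx'])])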
"""Pyrex.Distutils.extension
Provides a modified Extension class that understands how to describe
Pyrex extension modules in setup scripts."""
__revision__ = "$Id:$"
import os, string, sys
from types import *
import distutils.extension as _Extension
try:
import warnings
except ImportError:
warnings = None
class Extension(_Extension.Extension):
__doc__ = _Extension.Extension.__doc__ + \
"""pyrex_include_dirs : [string]
list of directories to search for Pyrex header files (.pxd) (in
Unix form for portability)
pyrex_create_listing_file : boolean
write pyrex error messages to a listing (.lis) file.
pyrex_cplus : boolean
use the C++ compiler for compiling and linking.
pyrex_c_in_temp : boolean
put generated C files in temp directory.
pyrex_gen_pxi : boolean
generate .pxi file for public declarations
"""
# When adding arguments to this constructor, be sure to update
# user_options.extend in build_ext.py.
def __init__ (self, name, sources,
include_dirs = None,
define_macros = None,
undef_macros = None,
library_dirs = None,
libraries = None,
runtime_library_dirs = None,
extra_objects = None,
extra_compile_args = None,
extra_link_args = None,
export_symbols = None,
#swig_opts = None,
depends = None,
language = None,
pyrex_include_dirs = None,
pyrex_create_listing = 0,
pyrex_cplus = 0,
pyrex_c_in_temp = 0,
pyrex_gen_pxi = 0,
**kw):
_Extension.Extension.__init__(self, name, sources,
include_dirs = include_dirs,
define_macros = define_macros,
undef_macros = undef_macros,
library_dirs = library_dirs,
libraries = libraries,
runtime_library_dirs = runtime_library_dirs,
extra_objects = extra_objects,
extra_compile_args = extra_compile_args,
extra_link_args = extra_link_args,
export_symbols = export_symbols,
#swig_opts = swig_opts,
depends = depends,
language = language,
**kw)
self.pyrex_include_dirs = pyrex_include_dirs or []
self.pyrex_create_listing = pyrex_create_listing
self.pyrex_cplus = pyrex_cplus
self.pyrex_c_in_temp = pyrex_c_in_temp
self.pyrex_gen_pxi = pyrex_gen_pxi
# class Extension
read_setup_file = _Extension.read_setup_file
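# Illustrative sketch (not part of this module): constructing an extension with
# the Pyrex-specific options defined above; 'fast.pyx' and 'include' are
# hypothetical names.
#
#   ext = Extension('fast', ['fast.pyx'],
#                   pyrex_include_dirs = ['include'],
#                   pyrex_gen_pxi = 1)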
#
# Pyrex - Darwin system interface
#
verbose = 0
gcc_pedantic = True
gcc_warnings_are_errors = True
gcc_all_warnings = True
gcc_optimize = False
import os, sys
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError
version_string = "%s.%s" % sys.version_info[:2]
py_include_dirs = [
"/Library/Frameworks/Python.framework/Versions/%s/Headers" % version_string
]
# MACOSX_DEPLOYMENT_TARGET can be set to 10.3 in most cases.
# But for the built-in Python 2.5.1 on Leopard, it needs to be set to 10.5.
# This looks like a bug that will be fixed in 2.5.2. If Apple updates their
# Python to 2.5.2, this fix should be OK.
import distutils.sysconfig as sc
python_prefix = sc.get_config_var('prefix')
leopard_python_prefix = '/System/Library/Frameworks/Python.framework/Versions/2.5'
full_version = "%s.%s.%s" % sys.version_info[:3]
if python_prefix == leopard_python_prefix and full_version == '2.5.1':
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.5"
else:
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.3"
compilers = ["gcc", "g++"]
compiler_options = \
"-g -c -fno-strict-aliasing -Wno-long-double -no-cpp-precomp " \
"-mno-fused-madd -fno-common -dynamic " \
.split()
if gcc_pedantic:
compiler_options.extend(["-pedantic", "-Wno-long-long"])
if gcc_warnings_are_errors:
compiler_options.append("-Werror")
if gcc_all_warnings:
compiler_options.append("-Wall")
compiler_options.append("-Wno-unused-function")
if gcc_optimize:
compiler_options.append("-O")
linkers = ["gcc", "g++"]
linker_options = \
"-Wl,-F.,-w -bundle -undefined dynamic_lookup" \
.split()
#linker_options = \
# "-Wl,-F.,-w -bundle -framework Python" \
# .split()
class CCompilerError(PyrexError):
pass
def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
# Compile the given C source file to produce
# an object file. Returns the pathname of the
# resulting file.
c_file = os.path.join(os.getcwd(), c_file)
o_file = replace_suffix(c_file, obj_suffix)
include_options = []
for dir in py_include_dirs:
include_options.append("-I%s" % dir)
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
def c_link(obj_file, verbose_flag = 0, extra_objects = [], cplus = 0):
return c_link_list([obj_file] + extra_objects, verbose_flag, cplus)
def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
# Link the given object files into a dynamically
# loadable extension file. Returns the pathname
# of the resulting file.
out_file = replace_suffix(obj_files[0], ".so")
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file
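# Illustrative sketch (not part of the original source): compile a C file
# produced by Cython and link it into a loadable module; 'spam.c' is a
# hypothetical file name.
def _example_build_module(c_file = "spam.c"):
    obj = c_compile(c_file, verbose_flag = 1)   # -> ".../spam.o"
    return c_link(obj, verbose_flag = 1)        # -> ".../spam.so"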
#
# Pyrex -- Mac system interface
#
import os, sys, string
import aetools
from aetools import TalkTo
from StdSuites.Standard_Suite import Standard_Suite_Events as Standard_Suite
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError
c_compiler = "MWCPPC"
c_optimizations = "off"
#c_linker = "PPCLink"
c_linker = "MWLinkPPC"
shared_lib_suffix = ".slb"
#py_home = "Python2.2:Home:"
py_home = sys.exec_prefix
py_include_dirs = (
py_home + "Include:",
py_home + "Mac:Include:"
)
pythoncore = py_home + "PythonCore"
mwlibdir = "MPW:Interfaces&Libraries:Libraries:MWPPCLibraries:"
libraries = (
#mwlibdir + "'MSL C.PPC.Lib'",
#mwlibdir + "'MSL RuntimePPC.Lib'",
mwlibdir + "'MSL ShLibRuntime.Lib'",
mwlibdir + "InterfaceLib",
#mwlibdir + "MathLib",
)
class CCompilerError(PyrexError):
pass
#---------------- ToolServer ---------------------------
from TS_Misc_Suite import TS_Misc_Suite
class ToolServer(Standard_Suite, TS_Misc_Suite, TalkTo):
pass
def send_toolserver_command(cmd):
ts = ToolServer('MPSX', start = 1)
return ts.DoScript(cmd)
def do_toolserver_command(command):
try:
result = send_toolserver_command(command)
except aetools.Error, e:
raise CCompilerError("Apple Event error: %s" % e)
errn, stat, stdout, stderr = result
if errn:
raise CCompilerError("ToolServer error: %s" % errn)
stdout = string.replace(stdout, "\r", "\n")
stderr = string.replace(stderr, "\r", "\n")
if stdout:
#print "<<< Begin ToolServer StdOut >>>"
sys.stderr.write(stdout)
#print "<<< End ToolServer StdOut >>>"
if stderr:
#print "<<< Begin ToolServer StdErr >>>"
sys.stderr.write(stderr)
#print "<<< End ToolServer StdErr >>>"
return stat
#-------------------------------------------------------
def c_compile(c_file):
# Compile the given C source file to produce
# an object file. Returns the pathname of the
# resulting file.
c_file = os.path.join(os.getcwd(), c_file)
#print "c_compile: c_file =", repr(c_file) ###
c_file_dir = os.path.dirname(c_file)
o_file = replace_suffix(c_file, ".o")
include_options = ["-i %s" % c_file_dir]
for dir in py_include_dirs:
include_options.append("-i %s" % dir)
command = "%s -opt %s -nomapcr -w off -r %s %s -o %s" % (
c_compiler,
c_optimizations,
string.join(include_options),
c_file,
o_file,
#e_file
)
#print "...command =", repr(command) ###
stat = do_toolserver_command(command)
if stat:
raise CCompilerError("C compiler returned status %s" % stat)
return o_file
def c_link(obj_file):
return c_link_list([obj_file])
def c_link_list(obj_files):
# Link the given object files into a dynamically
# loadable extension file. Returns the pathname
# of the resulting file.
out_file = replace_suffix(obj_files[0], shared_lib_suffix)
command = "%s -xm s -export all %s %s %s -o %s" % (
c_linker,
string.join(obj_files),
pythoncore,
string.join(libraries),
out_file)
stat = do_toolserver_command(command)
if stat:
raise CCompilerError("Linker returned status %s" % stat)
return out_file
def test_c_compile(link = 0):
objs = []
for arg in sys.argv[1:]:
if arg.endswith(".c"):
try:
obj = c_compile(arg)
except PyrexError, e:
#print "Caught a PyrexError:" ###
#print repr(e) ###
print("%s.%s: %s" % (e.__class__.__module__,
e.__class__.__name__, e))
sys.exit(1)
else:
obj = arg
objs.append(obj)
if link:
c_link_list(objs)
#
# Pyrex -- Misc Mac-specific things
#
import os, MacOS, macfs
def open_new_file(path):
# On the Mac, try to preserve Finder position
# of previously existing file.
fsspec = macfs.FSSpec(path)
try:
old_finfo = fsspec.GetFInfo()
except MacOS.Error, e:
#print "MacUtils.open_new_file:", e ###
old_finfo = None
try:
os.unlink(path)
except OSError:
pass
file = open(path, "w")
new_finfo = fsspec.GetFInfo()
if old_finfo:
#print "MacUtils.open_new_file:", path ###
#print "...old file info =", old_finfo.Creator, old_finfo.Type, old_finfo.Location ###
#print "...new file info =", new_finfo.Creator, new_finfo.Type, new_finfo.Location ###
new_finfo.Location = old_finfo.Location
new_finfo.Flags = old_finfo.Flags
# Make darn sure the type and creator are right. There seems
# to be a bug in MacPython 2.2 that screws them up sometimes.
new_finfo.Creator = "R*ch"
new_finfo.Type = "TEXT"
fsspec.SetFInfo(new_finfo)
return file
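# Usage sketch (hedged): open_new_file is intended as a drop-in replacement
# for open(path, "w") when regenerating a file whose Finder position should
# be preserved; "output.c" and "source" below are hypothetical.
#
#     f = open_new_file("output.c")
#     f.write(source)
#     f.close()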
# Makefile for Darwin
# Change this to your Python source location
PYTHON := /Local/Build/Pythonic/python/2.3
INCLUDE := -I$(PYTHON) -I$(PYTHON)/Include -I$(PYTHON)/Mac/Include
CCOPTS := -fno-strict-aliasing -Wno-long-double -no-cpp-precomp \
-mno-fused-madd -fno-common -dynamic
LDOPTS := -Wl,-F.,-w -bundle -framework Python -framework Carbon
all: _File.so
_File.o: _Filemodule_patched.c
	gcc -c $(INCLUDE) $(CCOPTS) $< -o $@
_File.so: _File.o
	gcc $(LDOPTS) $< -o $@
"""Suite Misc Suite: Suite that adds additional features to the Application.
Level 1, version 1
Generated from Macintosh HD:Desktop Folder:ToolServer 3.4.1:ToolServer
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'misc'
class TS_Misc_Suite:
def DoScript(self, _object, _attributes={}, **_arguments):
"""DoScript: Execute an MPW command, any command that could be executed from the command line can be sent as a script.
Required argument: The script to execute
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'misc'
_subcode = 'dosc'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
#if _arguments.has_key('errn'):
# raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
#if _arguments.has_key('----'):
# return _arguments['----']
errn = 0
stat = 0
stdout = ""
stderr = ""
if _arguments.has_key('errn'):
errn = _arguments['errn']
if errn:
errn = aetools.decodeerror(_arguments)
if _arguments.has_key('stat'):
stat = _arguments['stat']
if _arguments.has_key('----'):
stdout = _arguments['----']
if _arguments.has_key('diag'):
stderr = _arguments['diag']
return (errn, stat, stdout, stderr)
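# Usage sketch (hedged): DoScript is normally reached through a TalkTo
# subclass such as the ToolServer class in the Mac system interface above;
# the command string below is illustrative only.
#
#     ts = ToolServer('MPSX', start=1)
#     errn, stat, out, err = ts.DoScript("Date")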
#
# Indices of types declared in this module
#
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
_enumdeclarations = {
}
/*
* This is a hacked version of _Filemodule.c from the Python 2.3
* distribution to support access to the finderInfo field of the
* FSCatalogInfo data structure.
*/
/* ========================== Module _File ========================== */
#include "Python.h"
#ifdef _WIN32
#include "pywintoolbox.h"
#else
#include "macglue.h"
#include "pymactoolbox.h"
#endif
/* Macro to test whether a weak-loaded CFM function exists */
#define PyMac_PRECHECK(rtn) do { if ( &rtn == NULL ) {\
PyErr_SetString(PyExc_NotImplementedError, \
"Not available in this shared library/OS version"); \
return NULL; \
}} while(0)
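/*
** Usage sketch (hedged): guard a weak-linked Carbon call before invoking it.
** FSGetCatalogInfo is only an illustrative routine here.
**
**     PyMac_PRECHECK(FSGetCatalogInfo);
**     _err = FSGetCatalogInfo(&ref, whichInfo, &info, NULL, NULL, NULL);
*/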
#ifdef WITHOUT_FRAMEWORKS
#include <Files.h>
#else
#include <Carbon/Carbon.h>
#endif
#ifdef USE_TOOLBOX_OBJECT_GLUE
extern int _PyMac_GetFSSpec(PyObject *v, FSSpec *spec);
extern int _PyMac_GetFSRef(PyObject *v, FSRef *fsr);
extern PyObject *_PyMac_BuildFSSpec(FSSpec *spec);
extern PyObject *_PyMac_BuildFSRef(FSRef *spec);
#define PyMac_GetFSSpec _PyMac_GetFSSpec
#define PyMac_GetFSRef _PyMac_GetFSRef
#define PyMac_BuildFSSpec _PyMac_BuildFSSpec
#define PyMac_BuildFSRef _PyMac_BuildFSRef
#else
extern int PyMac_GetFSSpec(PyObject *v, FSSpec *spec);
extern int PyMac_GetFSRef(PyObject *v, FSRef *fsr);
extern PyObject *PyMac_BuildFSSpec(FSSpec *spec);
extern PyObject *PyMac_BuildFSRef(FSRef *spec);
#endif
/* Forward declarations */
static PyObject *FInfo_New(FInfo *itself);
static PyObject *FSRef_New(FSRef *itself);
static PyObject *FSSpec_New(FSSpec *itself);
static PyObject *Alias_New(AliasHandle itself);
static int FInfo_Convert(PyObject *v, FInfo *p_itself);
#define FSRef_Convert PyMac_GetFSRef
#define FSSpec_Convert PyMac_GetFSSpec
static int Alias_Convert(PyObject *v, AliasHandle *p_itself);
/*
** UTCDateTime records
*/
static int
UTCDateTime_Convert(PyObject *v, UTCDateTime *ptr)
{
return PyArg_Parse(v, "(HlH)", &ptr->highSeconds, &ptr->lowSeconds, &ptr->fraction);
}
static PyObject *
UTCDateTime_New(UTCDateTime *ptr)
{
return Py_BuildValue("(HlH)", ptr->highSeconds, ptr->lowSeconds, ptr->fraction);
}
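/*
** On the Python side a UTCDateTime is therefore exchanged as a plain
** (highSeconds, lowSeconds, fraction) tuple, e.g. (0, 3187296000L, 0);
** the value shown is illustrative, not a meaningful timestamp.
*/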
/*
** Optional fsspec and fsref pointers. None will pass NULL
*/
static int
myPyMac_GetOptFSSpecPtr(PyObject *v, FSSpec **spec)
{
if (v == Py_None) {
*spec = NULL;
return 1;
}
return PyMac_GetFSSpec(v, *spec);
}
static int
myPyMac_GetOptFSRefPtr(PyObject *v, FSRef **ref)
{
if (v == Py_None) {
*ref = NULL;
return 1;
}
return PyMac_GetFSRef(v, *ref);
}
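/*
** Usage sketch (hedged): these converters are meant to be used with the
** "O&" format so that a Python None argument becomes a NULL pointer, e.g.
**
**     FSSpec buf, *spec = &buf;
**     if (!PyArg_ParseTuple(args, "O&", myPyMac_GetOptFSSpecPtr, &spec))
**         return NULL;
*/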
/*
** Parse/generate objects
*/
static PyObject *
PyMac_BuildHFSUniStr255(HFSUniStr255 *itself)
{
return Py_BuildValue("u#", itself->unicode, itself->length);
}
static PyObject *File_Error;
static PyTypeObject FInfo_Type;
#define FInfo_Check(x) ((x)->ob_type == &FInfo_Type || PyObject_TypeCheck((x), &FInfo_Type))
typedef struct FInfoObject {
PyObject_HEAD
FInfo ob_itself;
} FInfoObject;
/* ------------------- Object type FSCatalogInfo -------------------- */
static PyTypeObject FSCatalogInfo_Type;
#define FSCatalogInfo_Check(x) ((x)->ob_type == &FSCatalogInfo_Type || PyObject_TypeCheck((x), &FSCatalogInfo_Type))
typedef struct FSCatalogInfoObject {
PyObject_HEAD
FSCatalogInfo ob_itself;
} FSCatalogInfoObject;
static PyObject *FSCatalogInfo_New(FSCatalogInfo *itself)
{
FSCatalogInfoObject *it;
	if (itself == NULL) { Py_INCREF(Py_None); return Py_None; }
it = PyObject_NEW(FSCatalogInfoObject, &FSCatalogInfo_Type);
if (it == NULL) return NULL;
it->ob_itself = *itself;
return (PyObject *)it;
}
static int FSCatalogInfo_Convert(PyObject *v, FSCatalogInfo *p_itself)
{
if (!FSCatalogInfo_Check(v))
{
PyErr_SetString(PyExc_TypeError, "FSCatalogInfo required");
return 0;
}
*p_itself = ((FSCatalogInfoObject *)v)->ob_itself;
return 1;
}
static void FSCatalogInfo_dealloc(FSCatalogInfoObject *self)
{
/* Cleanup of self->ob_itself goes here */
self->ob_type->tp_free((PyObject *)self);
}
static PyMethodDef FSCatalogInfo_methods[] = {
{NULL, NULL, 0}
};
static PyObject *FSCatalogInfo_get_nodeFlags(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("H", self->ob_itself.nodeFlags);
}
static int FSCatalogInfo_set_nodeFlags(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "H", &self->ob_itself.nodeFlags)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_volume(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("h", self->ob_itself.volume);
}
static int FSCatalogInfo_set_volume(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "h", &self->ob_itself.volume)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_parentDirID(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.parentDirID);
}
static int FSCatalogInfo_set_parentDirID(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.parentDirID)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_nodeID(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.nodeID);
}
static int FSCatalogInfo_set_nodeID(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.nodeID)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_createDate(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.createDate);
}
static int FSCatalogInfo_set_createDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.createDate)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_contentModDate(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.contentModDate);
}
static int FSCatalogInfo_set_contentModDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.contentModDate)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_attributeModDate(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.attributeModDate);
}
static int FSCatalogInfo_set_attributeModDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.attributeModDate)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_accessDate(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.accessDate);
}
static int FSCatalogInfo_set_accessDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.accessDate)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_backupDate(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.backupDate);
}
static int FSCatalogInfo_set_backupDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.backupDate)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_permissions(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("(llll)", self->ob_itself.permissions[0], self->ob_itself.permissions[1], self->ob_itself.permissions[2], self->ob_itself.permissions[3]);
}
static int FSCatalogInfo_set_permissions(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "(llll)", &self->ob_itself.permissions[0], &self->ob_itself.permissions[1], &self->ob_itself.permissions[2], &self->ob_itself.permissions[3])-1;
return 0;
}
static PyObject *FSCatalogInfo_get_valence(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.valence);
}
static int FSCatalogInfo_set_valence(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.valence)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_dataLogicalSize(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.dataLogicalSize);
}
static int FSCatalogInfo_set_dataLogicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.dataLogicalSize)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_dataPhysicalSize(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.dataPhysicalSize);
}
static int FSCatalogInfo_set_dataPhysicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.dataPhysicalSize)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_rsrcLogicalSize(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.rsrcLogicalSize);
}
static int FSCatalogInfo_set_rsrcLogicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.rsrcLogicalSize)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_rsrcPhysicalSize(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.rsrcPhysicalSize);
}
static int FSCatalogInfo_set_rsrcPhysicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.rsrcPhysicalSize)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_sharingFlags(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("l", self->ob_itself.sharingFlags);
}
static int FSCatalogInfo_set_sharingFlags(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "l", &self->ob_itself.sharingFlags)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_userPrivileges(FSCatalogInfoObject *self, void *closure)
{
return Py_BuildValue("b", self->ob_itself.userPrivileges);
}
static int FSCatalogInfo_set_userPrivileges(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "b", &self->ob_itself.userPrivileges)-1;
return 0;
}
static PyObject *FSCatalogInfo_get_finderInfo(FSCatalogInfoObject *self, void *closure)
{
return FInfo_New((FInfo *)self->ob_itself.finderInfo);
}
static int FSCatalogInfo_set_finderInfo(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
if (!FInfo_Check(v)) {
PyErr_SetString(PyExc_TypeError, "Expected an FInfo object");
return -1;
}
*(FInfo *)self->ob_itself.finderInfo = ((FInfoObject *)v)->ob_itself;
return 0;
}
static PyGetSetDef FSCatalogInfo_getsetlist[] = {
{"nodeFlags", (getter)FSCatalogInfo_get_nodeFlags, (setter)FSCatalogInfo_set_nodeFlags, NULL},
{"volume", (getter)FSCatalogInfo_get_volume, (setter)FSCatalogInfo_set_volume, NULL},
{"parentDirID", (getter)FSCatalogInfo_get_parentDirID, (setter)FSCatalogInfo_set_parentDirID, NULL},
{"nodeID", (getter)FSCatalogInfo_get_nodeID, (setter)FSCatalogInfo_set_nodeID, NULL},
{"createDate", (getter)FSCatalogInfo_get_createDate, (setter)FSCatalogInfo_set_createDate, NULL},
{"contentModDate", (getter)FSCatalogInfo_get_contentModDate, (setter)FSCatalogInfo_set_contentModDate, NULL},
{"attributeModDate", (getter)FSCatalogInfo_get_attributeModDate, (setter)FSCatalogInfo_set_attributeModDate, NULL},
{"accessDate", (getter)FSCatalogInfo_get_accessDate, (setter)FSCatalogInfo_set_accessDate, NULL},
{"backupDate", (getter)FSCatalogInfo_get_backupDate, (setter)FSCatalogInfo_set_backupDate, NULL},
{"permissions", (getter)FSCatalogInfo_get_permissions, (setter)FSCatalogInfo_set_permissions, NULL},
{"valence", (getter)FSCatalogInfo_get_valence, (setter)FSCatalogInfo_set_valence, NULL},
{"dataLogicalSize", (getter)FSCatalogInfo_get_dataLogicalSize, (setter)FSCatalogInfo_set_dataLogicalSize, NULL},
{"dataPhysicalSize", (getter)FSCatalogInfo_get_dataPhysicalSize, (setter)FSCatalogInfo_set_dataPhysicalSize, NULL},
{"rsrcLogicalSize", (getter)FSCatalogInfo_get_rsrcLogicalSize, (setter)FSCatalogInfo_set_rsrcLogicalSize, NULL},
{"rsrcPhysicalSize", (getter)FSCatalogInfo_get_rsrcPhysicalSize, (setter)FSCatalogInfo_set_rsrcPhysicalSize, NULL},
{"sharingFlags", (getter)FSCatalogInfo_get_sharingFlags, (setter)FSCatalogInfo_set_sharingFlags, NULL},
{"userPrivileges", (getter)FSCatalogInfo_get_userPrivileges, (setter)FSCatalogInfo_set_userPrivileges, NULL},
{"finderInfo", (getter)FSCatalogInfo_get_finderInfo, (setter)FSCatalogInfo_set_finderInfo, NULL},
{NULL, NULL, NULL, NULL},
};
#define FSCatalogInfo_compare NULL
#define FSCatalogInfo_repr NULL
#define FSCatalogInfo_hash NULL
static int FSCatalogInfo_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kw[] = {
"nodeFlags",
"volume",
"parentDirID",
"nodeID",
"createDate",
"contentModDate",
"atributeModDate",
"accessDate",
"backupDate",
"valence",
"dataLogicalSize",
"dataPhysicalSize",
"rsrcLogicalSize",
"rsrcPhysicalSize",
"sharingFlags",
"userPrivileges"
, 0};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|HhllO&O&O&O&O&llllllb", kw, &((FSCatalogInfoObject *)self)->ob_itself.nodeFlags,
&((FSCatalogInfoObject *)self)->ob_itself.volume,
&((FSCatalogInfoObject *)self)->ob_itself.parentDirID,
&((FSCatalogInfoObject *)self)->ob_itself.nodeID,
UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.createDate,
UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.contentModDate,
UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.attributeModDate,
UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.accessDate,
UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.backupDate,
&((FSCatalogInfoObject *)self)->ob_itself.valence,
&((FSCatalogInfoObject *)self)->ob_itself.dataLogicalSize,
&((FSCatalogInfoObject *)self)->ob_itself.dataPhysicalSize,
&((FSCatalogInfoObject *)self)->ob_itself.rsrcLogicalSize,
&((FSCatalogInfoObject *)self)->ob_itself.rsrcPhysicalSize,
&((FSCatalogInfoObject *)self)->ob_itself.sharingFlags,
&((FSCatalogInfoObject *)self)->ob_itself.userPrivileges))
{
return -1;
}
return 0;
}
#define FSCatalogInfo_tp_alloc PyType_GenericAlloc
static PyObject *FSCatalogInfo_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
memset(&((FSCatalogInfoObject *)self)->ob_itself, 0, sizeof(FSCatalogInfo));
return self;
}
#define FSCatalogInfo_tp_free PyObject_Del
static PyTypeObject FSCatalogInfo_Type = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
"Carbon.File.FSCatalogInfo", /*tp_name*/
sizeof(FSCatalogInfoObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) FSCatalogInfo_dealloc, /*tp_dealloc*/
0, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc) FSCatalogInfo_compare, /*tp_compare*/
(reprfunc) FSCatalogInfo_repr, /*tp_repr*/
(PyNumberMethods *)0, /* tp_as_number */
(PySequenceMethods *)0, /* tp_as_sequence */
(PyMappingMethods *)0, /* tp_as_mapping */
(hashfunc) FSCatalogInfo_hash, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
PyObject_GenericGetAttr, /*tp_getattro*/
PyObject_GenericSetAttr, /*tp_setattro */
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
FSCatalogInfo_methods, /* tp_methods */
0, /*tp_members*/
FSCatalogInfo_getsetlist, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
FSCatalogInfo_tp_init, /* tp_init */
FSCatalogInfo_tp_alloc, /* tp_alloc */
FSCatalogInfo_tp_new, /* tp_new */
FSCatalogInfo_tp_free, /* tp_free */
};
/* ----------------- End object type FSCatalogInfo ------------------ */
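/*
** Usage sketch from Python (hedged; assumes the module is importable as
** _File or Carbon.File):
**
**     info = FSCatalogInfo(nodeFlags=0)
**     info.finderInfo = FInfo()    # the field this patched module exposes
*/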
/* ----------------------- Object type FInfo ------------------------ */
static PyObject *FInfo_New(FInfo *itself)
{
FInfoObject *it;
if (itself == NULL) return PyMac_Error(resNotFound);
it = PyObject_NEW(FInfoObject, &FInfo_Type);
if (it == NULL) return NULL;
it->ob_itself = *itself;
return (PyObject *)it;
}
static int FInfo_Convert(PyObject *v, FInfo *p_itself)
{
if (!FInfo_Check(v))
{
PyErr_SetString(PyExc_TypeError, "FInfo required");
return 0;
}
*p_itself = ((FInfoObject *)v)->ob_itself;
return 1;
}
static void FInfo_dealloc(FInfoObject *self)
{
/* Cleanup of self->ob_itself goes here */
self->ob_type->tp_free((PyObject *)self);
}
static PyMethodDef FInfo_methods[] = {
{NULL, NULL, 0}
};
static PyObject *FInfo_get_Type(FInfoObject *self, void *closure)
{
return Py_BuildValue("O&", PyMac_BuildOSType, self->ob_itself.fdType);
}
static int FInfo_set_Type(FInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", PyMac_GetOSType, &self->ob_itself.fdType)-1;
return 0;
}
static PyObject *FInfo_get_Creator(FInfoObject *self, void *closure)
{
return Py_BuildValue("O&", PyMac_BuildOSType, self->ob_itself.fdCreator);
}
static int FInfo_set_Creator(FInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", PyMac_GetOSType, &self->ob_itself.fdCreator)-1;
return 0;
}
static PyObject *FInfo_get_Flags(FInfoObject *self, void *closure)
{
return Py_BuildValue("H", self->ob_itself.fdFlags);
}
static int FInfo_set_Flags(FInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "H", &self->ob_itself.fdFlags)-1;
return 0;
}
static PyObject *FInfo_get_Location(FInfoObject *self, void *closure)
{
return Py_BuildValue("O&", PyMac_BuildPoint, self->ob_itself.fdLocation);
}
static int FInfo_set_Location(FInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "O&", PyMac_GetPoint, &self->ob_itself.fdLocation)-1;
return 0;
}
static PyObject *FInfo_get_Fldr(FInfoObject *self, void *closure)
{
return Py_BuildValue("h", self->ob_itself.fdFldr);
}
static int FInfo_set_Fldr(FInfoObject *self, PyObject *v, void *closure)
{
return PyArg_Parse(v, "h", &self->ob_itself.fdFldr)-1;
return 0;
}
static PyGetSetDef FInfo_getsetlist[] = {
{"Type", (getter)FInfo_get_Type, (setter)FInfo_set_Type, "4-char file type"},
{"Creator", (getter)FInfo_get_Creator, (setter)FInfo_set_Creator, "4-char file creator"},
{"Flags", (getter)FInfo_get_Flags, (setter)FInfo_set_Flags, "Finder flag bits"},
{"Location", (getter)FInfo_get_Location, (setter)FInfo_set_Location, "(x, y) location of the file's icon in its parent finder window"},
{"Fldr", (getter)FInfo_get_Fldr, (setter)FInfo_set_Fldr, "Original folder, for 'put away'"},
{NULL, NULL, NULL, NULL},
};
#define FInfo_compare NULL
#define FInfo_repr NULL
#define FInfo_hash NULL
static int FInfo_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
{
FInfo *itself = NULL;
static char *kw[] = {"itself", 0};
if (PyArg_ParseTupleAndKeywords(args, kwds, "|O&", kw, FInfo_Convert, &itself))
{
if (itself) memcpy(&((FInfoObject *)self)->ob_itself, itself, sizeof(FInfo));
return 0;
}
return -1;
}
#define FInfo_tp_alloc PyType_GenericAlloc
static PyObject *FInfo_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
memset(&((FInfoObject *)self)->ob_itself, 0, sizeof(FInfo));
return self;
}
#define FInfo_tp_free PyObject_Del
static PyTypeObject FInfo_Type = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
"Carbon.File.FInfo", /*tp_name*/
sizeof(FInfoObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) FInfo_dealloc, /*tp_dealloc*/
0, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc) FInfo_compare, /*tp_compare*/
(reprfunc) FInfo_repr, /*tp_repr*/
(PyNumberMethods *)0, /* tp_as_number */
(PySequenceMethods *)0, /* tp_as_sequence */
(PyMappingMethods *)0, /* tp_as_mapping */
(hashfunc) FInfo_hash, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
PyObject_GenericGetAttr, /*tp_getattro*/
PyObject_GenericSetAttr, /*tp_setattro */
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
FInfo_methods, /* tp_methods */
0, /*tp_members*/
FInfo_getsetlist, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
FInfo_tp_init, /* tp_init */
FInfo_tp_alloc, /* tp_alloc */
FInfo_tp_new, /* tp_new */
FInfo_tp_free, /* tp_free */
};
/* --------------------- End object type FInfo ---------------------- */
/* ----------------------- Object type Alias ------------------------ */
static PyTypeObject Alias_Type;
#define Alias_Check(x) ((x)->ob_type == &Alias_Type || PyObject_TypeCheck((x), &Alias_Type))
typedef struct AliasObject {
PyObject_HEAD
AliasHandle ob_itself;
void (*ob_freeit)(AliasHandle ptr);
} AliasObject;
static PyObject *Alias_New(AliasHandle itself)
{
AliasObject *it;
if (itself == NULL) return PyMac_Error(resNotFound);
it = PyObject_NEW(AliasObject, &Alias_Type);
if (it == NULL) return NULL;
it->ob_itself = itself;
it->ob_freeit = NULL;
return (PyObject *)it;
}
static int Alias_Convert(PyObject *v, AliasHandle *p_itself)
{
if (!Alias_Check(v))
{
PyErr_SetString(PyExc_TypeError, "Alias required");
return 0;
}
*p_itself = ((AliasObject *)v)->ob_itself;
return 1;
}
static void Alias_dealloc(AliasObject *self)
{
if (self->ob_freeit && self->ob_itself)
{
self->ob_freeit(self->ob_itself);
}
self->ob_itself = NULL;
self->ob_type->tp_free((PyObject *)self);
}
static PyObject *Alias_ResolveAlias(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec fromFile__buf__;
FSSpec *fromFile = &fromFile__buf__;
FSSpec target;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "O&",
myPyMac_GetOptFSSpecPtr, &fromFile))
return NULL;
_err = ResolveAlias(fromFile,
_self->ob_itself,
&target,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSSpec_New, &target,
wasChanged);
return _res;
}
static PyObject *Alias_GetAliasInfo(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
AliasInfoType index;
Str63 theString;
if (!PyArg_ParseTuple(_args, "h",
&index))
return NULL;
_err = GetAliasInfo(_self->ob_itself,
index,
theString);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
PyMac_BuildStr255, theString);
return _res;
}
static PyObject *Alias_ResolveAliasWithMountFlags(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec fromFile__buf__;
FSSpec *fromFile = &fromFile__buf__;
FSSpec target;
Boolean wasChanged;
unsigned long mountFlags;
if (!PyArg_ParseTuple(_args, "O&l",
myPyMac_GetOptFSSpecPtr, &fromFile,
&mountFlags))
return NULL;
_err = ResolveAliasWithMountFlags(fromFile,
_self->ob_itself,
&target,
&wasChanged,
mountFlags);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSSpec_New, &target,
wasChanged);
return _res;
}
static PyObject *Alias_FollowFinderAlias(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec fromFile__buf__;
FSSpec *fromFile = &fromFile__buf__;
Boolean logon;
FSSpec target;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "O&b",
myPyMac_GetOptFSSpecPtr, &fromFile,
&logon))
return NULL;
_err = FollowFinderAlias(fromFile,
_self->ob_itself,
logon,
&target,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSSpec_New, &target,
wasChanged);
return _res;
}
static PyObject *Alias_FSResolveAliasWithMountFlags(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef fromFile__buf__;
FSRef *fromFile = &fromFile__buf__;
FSRef target;
Boolean wasChanged;
unsigned long mountFlags;
if (!PyArg_ParseTuple(_args, "O&l",
myPyMac_GetOptFSRefPtr, &fromFile,
&mountFlags))
return NULL;
_err = FSResolveAliasWithMountFlags(fromFile,
_self->ob_itself,
&target,
&wasChanged,
mountFlags);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSRef_New, &target,
wasChanged);
return _res;
}
static PyObject *Alias_FSResolveAlias(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef fromFile__buf__;
FSRef *fromFile = &fromFile__buf__;
FSRef target;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "O&",
myPyMac_GetOptFSRefPtr, &fromFile))
return NULL;
_err = FSResolveAlias(fromFile,
_self->ob_itself,
&target,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSRef_New, &target,
wasChanged);
return _res;
}
static PyObject *Alias_FSFollowFinderAlias(AliasObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef fromFile;
Boolean logon;
FSRef target;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "b",
&logon))
return NULL;
_err = FSFollowFinderAlias(&fromFile,
_self->ob_itself,
logon,
&target,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&O&b",
FSRef_New, &fromFile,
FSRef_New, &target,
wasChanged);
return _res;
}
static PyMethodDef Alias_methods[] = {
{"ResolveAlias", (PyCFunction)Alias_ResolveAlias, 1,
PyDoc_STR("(FSSpec fromFile) -> (FSSpec target, Boolean wasChanged)")},
{"GetAliasInfo", (PyCFunction)Alias_GetAliasInfo, 1,
PyDoc_STR("(AliasInfoType index) -> (Str63 theString)")},
{"ResolveAliasWithMountFlags", (PyCFunction)Alias_ResolveAliasWithMountFlags, 1,
PyDoc_STR("(FSSpec fromFile, unsigned long mountFlags) -> (FSSpec target, Boolean wasChanged)")},
{"FollowFinderAlias", (PyCFunction)Alias_FollowFinderAlias, 1,
PyDoc_STR("(FSSpec fromFile, Boolean logon) -> (FSSpec target, Boolean wasChanged)")},
{"FSResolveAliasWithMountFlags", (PyCFunction)Alias_FSResolveAliasWithMountFlags, 1,
PyDoc_STR("(FSRef fromFile, unsigned long mountFlags) -> (FSRef target, Boolean wasChanged)")},
{"FSResolveAlias", (PyCFunction)Alias_FSResolveAlias, 1,
PyDoc_STR("(FSRef fromFile) -> (FSRef target, Boolean wasChanged)")},
{"FSFollowFinderAlias", (PyCFunction)Alias_FSFollowFinderAlias, 1,
PyDoc_STR("(Boolean logon) -> (FSRef fromFile, FSRef target, Boolean wasChanged)")},
{NULL, NULL, 0}
};
static PyObject *Alias_get_data(AliasObject *self, void *closure)
{
int size;
PyObject *rv;
size = GetHandleSize((Handle)self->ob_itself);
HLock((Handle)self->ob_itself);
rv = PyString_FromStringAndSize(*(Handle)self->ob_itself, size);
HUnlock((Handle)self->ob_itself);
return rv;
}
#define Alias_set_data NULL
static PyGetSetDef Alias_getsetlist[] = {
{"data", (getter)Alias_get_data, (setter)Alias_set_data, "Raw data of the alias object"},
{NULL, NULL, NULL, NULL},
};
#define Alias_compare NULL
#define Alias_repr NULL
#define Alias_hash NULL
static int Alias_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
{
AliasHandle itself = NULL;
char *rawdata = NULL;
int rawdatalen = 0;
Handle h;
static char *kw[] = {"itself", "rawdata", 0};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O&s#", kw, Alias_Convert, &itself, &rawdata, &rawdatalen))
return -1;
if (itself && rawdata)
{
PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
return -1;
}
if (!itself && !rawdata)
{
PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
return -1;
}
if (rawdata)
{
if ((h = NewHandle(rawdatalen)) == NULL)
{
PyErr_NoMemory();
return -1;
}
HLock(h);
memcpy((char *)*h, rawdata, rawdatalen);
HUnlock(h);
((AliasObject *)self)->ob_itself = (AliasHandle)h;
return 0;
}
((AliasObject *)self)->ob_itself = itself;
return 0;
}
#define Alias_tp_alloc PyType_GenericAlloc
static PyObject *Alias_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
((AliasObject *)self)->ob_itself = NULL;
return self;
}
#define Alias_tp_free PyObject_Del
static PyTypeObject Alias_Type = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
"Carbon.File.Alias", /*tp_name*/
sizeof(AliasObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) Alias_dealloc, /*tp_dealloc*/
0, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc) Alias_compare, /*tp_compare*/
(reprfunc) Alias_repr, /*tp_repr*/
(PyNumberMethods *)0, /* tp_as_number */
(PySequenceMethods *)0, /* tp_as_sequence */
(PyMappingMethods *)0, /* tp_as_mapping */
(hashfunc) Alias_hash, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
PyObject_GenericGetAttr, /*tp_getattro*/
PyObject_GenericSetAttr, /*tp_setattro */
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
Alias_methods, /* tp_methods */
0, /*tp_members*/
Alias_getsetlist, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
Alias_tp_init, /* tp_init */
Alias_tp_alloc, /* tp_alloc */
Alias_tp_new, /* tp_new */
Alias_tp_free, /* tp_free */
};
/* --------------------- End object type Alias ---------------------- */
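/*
** Usage sketch from Python (hedged): an Alias can be reconstructed from the
** raw bytes exposed through its .data attribute; "alias" below is a
** hypothetical existing Alias object.
**
**     raw = alias.data
**     copy = Alias(rawdata=raw)
*/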
/* ----------------------- Object type FSSpec ----------------------- */
static PyTypeObject FSSpec_Type;
#define FSSpec_Check(x) ((x)->ob_type == &FSSpec_Type || PyObject_TypeCheck((x), &FSSpec_Type))
typedef struct FSSpecObject {
PyObject_HEAD
FSSpec ob_itself;
} FSSpecObject;
static PyObject *FSSpec_New(FSSpec *itself)
{
FSSpecObject *it;
if (itself == NULL) return PyMac_Error(resNotFound);
it = PyObject_NEW(FSSpecObject, &FSSpec_Type);
if (it == NULL) return NULL;
it->ob_itself = *itself;
return (PyObject *)it;
}
static void FSSpec_dealloc(FSSpecObject *self)
{
/* Cleanup of self->ob_itself goes here */
self->ob_type->tp_free((PyObject *)self);
}
static PyObject *FSSpec_FSpOpenDF(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt8 permission;
short refNum;
if (!PyArg_ParseTuple(_args, "b",
&permission))
return NULL;
_err = FSpOpenDF(&_self->ob_itself,
permission,
&refNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
refNum);
return _res;
}
static PyObject *FSSpec_FSpOpenRF(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt8 permission;
short refNum;
if (!PyArg_ParseTuple(_args, "b",
&permission))
return NULL;
_err = FSpOpenRF(&_self->ob_itself,
permission,
&refNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
refNum);
return _res;
}
static PyObject *FSSpec_FSpCreate(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
OSType creator;
OSType fileType;
ScriptCode scriptTag;
if (!PyArg_ParseTuple(_args, "O&O&h",
PyMac_GetOSType, &creator,
PyMac_GetOSType, &fileType,
&scriptTag))
return NULL;
_err = FSpCreate(&_self->ob_itself,
creator,
fileType,
scriptTag);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpDirCreate(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
ScriptCode scriptTag;
long createdDirID;
if (!PyArg_ParseTuple(_args, "h",
&scriptTag))
return NULL;
_err = FSpDirCreate(&_self->ob_itself,
scriptTag,
&createdDirID);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
createdDirID);
return _res;
}
static PyObject *FSSpec_FSpDelete(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSpDelete(&_self->ob_itself);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpGetFInfo(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FInfo fndrInfo;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSpGetFInfo(&_self->ob_itself,
&fndrInfo);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FInfo_New, &fndrInfo);
return _res;
}
static PyObject *FSSpec_FSpSetFInfo(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FInfo fndrInfo;
if (!PyArg_ParseTuple(_args, "O&",
FInfo_Convert, &fndrInfo))
return NULL;
_err = FSpSetFInfo(&_self->ob_itself,
&fndrInfo);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpSetFLock(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSpSetFLock(&_self->ob_itself);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpRstFLock(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSpRstFLock(&_self->ob_itself);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpRename(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Str255 newName;
if (!PyArg_ParseTuple(_args, "O&",
PyMac_GetStr255, newName))
return NULL;
_err = FSpRename(&_self->ob_itself,
newName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpCatMove(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec dest;
if (!PyArg_ParseTuple(_args, "O&",
FSSpec_Convert, &dest))
return NULL;
_err = FSpCatMove(&_self->ob_itself,
&dest);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpExchangeFiles(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec dest;
if (!PyArg_ParseTuple(_args, "O&",
FSSpec_Convert, &dest))
return NULL;
_err = FSpExchangeFiles(&_self->ob_itself,
&dest);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSSpec_FSpMakeFSRef(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef newRef;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSpMakeFSRef(&_self->ob_itself,
&newRef);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FSRef_New, &newRef);
return _res;
}
static PyObject *FSSpec_NewAliasMinimal(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
AliasHandle alias;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = NewAliasMinimal(&_self->ob_itself,
&alias);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
Alias_New, alias);
return _res;
}
static PyObject *FSSpec_IsAliasFile(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Boolean aliasFileFlag;
Boolean folderFlag;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = IsAliasFile(&_self->ob_itself,
&aliasFileFlag,
&folderFlag);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("bb",
aliasFileFlag,
folderFlag);
return _res;
}
static PyObject *FSSpec_as_pathname(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
char strbuf[1024];
OSErr err;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
err = PyMac_GetFullPathname(&_self->ob_itself, strbuf, sizeof(strbuf));
if ( err ) {
PyMac_Error(err);
return NULL;
}
_res = PyString_FromString(strbuf);
return _res;
}
static PyObject *FSSpec_as_tuple(FSSpecObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_res = Py_BuildValue("(iis#)", _self->ob_itself.vRefNum, _self->ob_itself.parID,
&_self->ob_itself.name[1], _self->ob_itself.name[0]);
return _res;
}
static PyMethodDef FSSpec_methods[] = {
{"FSpOpenDF", (PyCFunction)FSSpec_FSpOpenDF, 1,
PyDoc_STR("(SInt8 permission) -> (short refNum)")},
{"FSpOpenRF", (PyCFunction)FSSpec_FSpOpenRF, 1,
PyDoc_STR("(SInt8 permission) -> (short refNum)")},
{"FSpCreate", (PyCFunction)FSSpec_FSpCreate, 1,
PyDoc_STR("(OSType creator, OSType fileType, ScriptCode scriptTag) -> None")},
{"FSpDirCreate", (PyCFunction)FSSpec_FSpDirCreate, 1,
PyDoc_STR("(ScriptCode scriptTag) -> (long createdDirID)")},
{"FSpDelete", (PyCFunction)FSSpec_FSpDelete, 1,
PyDoc_STR("() -> None")},
{"FSpGetFInfo", (PyCFunction)FSSpec_FSpGetFInfo, 1,
PyDoc_STR("() -> (FInfo fndrInfo)")},
{"FSpSetFInfo", (PyCFunction)FSSpec_FSpSetFInfo, 1,
PyDoc_STR("(FInfo fndrInfo) -> None")},
{"FSpSetFLock", (PyCFunction)FSSpec_FSpSetFLock, 1,
PyDoc_STR("() -> None")},
{"FSpRstFLock", (PyCFunction)FSSpec_FSpRstFLock, 1,
PyDoc_STR("() -> None")},
{"FSpRename", (PyCFunction)FSSpec_FSpRename, 1,
PyDoc_STR("(Str255 newName) -> None")},
{"FSpCatMove", (PyCFunction)FSSpec_FSpCatMove, 1,
PyDoc_STR("(FSSpec dest) -> None")},
{"FSpExchangeFiles", (PyCFunction)FSSpec_FSpExchangeFiles, 1,
PyDoc_STR("(FSSpec dest) -> None")},
{"FSpMakeFSRef", (PyCFunction)FSSpec_FSpMakeFSRef, 1,
PyDoc_STR("() -> (FSRef newRef)")},
{"NewAliasMinimal", (PyCFunction)FSSpec_NewAliasMinimal, 1,
PyDoc_STR("() -> (AliasHandle alias)")},
{"IsAliasFile", (PyCFunction)FSSpec_IsAliasFile, 1,
PyDoc_STR("() -> (Boolean aliasFileFlag, Boolean folderFlag)")},
{"as_pathname", (PyCFunction)FSSpec_as_pathname, 1,
PyDoc_STR("() -> string")},
{"as_tuple", (PyCFunction)FSSpec_as_tuple, 1,
PyDoc_STR("() -> (vRefNum, dirID, name)")},
{NULL, NULL, 0}
};
static PyObject *FSSpec_get_data(FSSpecObject *self, void *closure)
{
return PyString_FromStringAndSize((char *)&self->ob_itself, sizeof(self->ob_itself));
}
#define FSSpec_set_data NULL
static PyGetSetDef FSSpec_getsetlist[] = {
{"data", (getter)FSSpec_get_data, (setter)FSSpec_set_data, "Raw data of the FSSpec object"},
{NULL, NULL, NULL, NULL},
};
#define FSSpec_compare NULL
static PyObject * FSSpec_repr(FSSpecObject *self)
{
char buf[512];
PyOS_snprintf(buf, sizeof(buf), "%s((%d, %ld, '%.*s'))",
self->ob_type->tp_name,
self->ob_itself.vRefNum,
self->ob_itself.parID,
self->ob_itself.name[0], self->ob_itself.name+1);
return PyString_FromString(buf);
}
#define FSSpec_hash NULL
static int FSSpec_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
{
PyObject *v = NULL;
char *rawdata = NULL;
int rawdatalen = 0;
static char *kw[] = {"itself", "rawdata", 0};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|Os#", kw, &v, &rawdata, &rawdatalen))
return -1;
if (v && rawdata)
{
PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
return -1;
}
if (!v && !rawdata)
{
PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
return -1;
}
if (rawdata)
{
if (rawdatalen != sizeof(FSSpec))
{
PyErr_SetString(PyExc_TypeError, "FSSpec rawdata incorrect size");
return -1;
}
memcpy(&((FSSpecObject *)self)->ob_itself, rawdata, rawdatalen);
return 0;
}
if (PyMac_GetFSSpec(v, &((FSSpecObject *)self)->ob_itself)) return 0;
return -1;
}
#define FSSpec_tp_alloc PyType_GenericAlloc
static PyObject *FSSpec_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
memset(&((FSSpecObject *)self)->ob_itself, 0, sizeof(FSSpec));
return self;
}
#define FSSpec_tp_free PyObject_Del
static PyTypeObject FSSpec_Type = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
"Carbon.File.FSSpec", /*tp_name*/
sizeof(FSSpecObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) FSSpec_dealloc, /*tp_dealloc*/
0, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc) FSSpec_compare, /*tp_compare*/
(reprfunc) FSSpec_repr, /*tp_repr*/
(PyNumberMethods *)0, /* tp_as_number */
(PySequenceMethods *)0, /* tp_as_sequence */
(PyMappingMethods *)0, /* tp_as_mapping */
(hashfunc) FSSpec_hash, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
PyObject_GenericGetAttr, /*tp_getattro*/
PyObject_GenericSetAttr, /*tp_setattro */
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
FSSpec_methods, /* tp_methods */
0, /*tp_members*/
FSSpec_getsetlist, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
FSSpec_tp_init, /* tp_init */
FSSpec_tp_alloc, /* tp_alloc */
FSSpec_tp_new, /* tp_new */
FSSpec_tp_free, /* tp_free */
};
/* --------------------- End object type FSSpec --------------------- */
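/*
** Usage sketch from Python (hedged): an FSSpec accepts either an existing
** FSSpec-convertible object or raw bytes, and can report a full pathname;
** "spec" below is a hypothetical FSSpec instance.
**
**     path = spec.as_pathname()
**     vRefNum, dirID, name = spec.as_tuple()
*/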
/* ----------------------- Object type FSRef ------------------------ */
static PyTypeObject FSRef_Type;
#define FSRef_Check(x) ((x)->ob_type == &FSRef_Type || PyObject_TypeCheck((x), &FSRef_Type))
typedef struct FSRefObject {
PyObject_HEAD
FSRef ob_itself;
} FSRefObject;
static PyObject *FSRef_New(FSRef *itself)
{
FSRefObject *it;
if (itself == NULL) return PyMac_Error(resNotFound);
it = PyObject_NEW(FSRefObject, &FSRef_Type);
if (it == NULL) return NULL;
it->ob_itself = *itself;
return (PyObject *)it;
}
static void FSRef_dealloc(FSRefObject *self)
{
/* Cleanup of self->ob_itself goes here */
self->ob_type->tp_free((PyObject *)self);
}
static PyObject *FSRef_FSMakeFSRefUnicode(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *nameLength__in__;
UniCharCount nameLength__len__;
int nameLength__in_len__;
TextEncoding textEncodingHint;
FSRef newRef;
if (!PyArg_ParseTuple(_args, "u#l",
&nameLength__in__, &nameLength__in_len__,
&textEncodingHint))
return NULL;
nameLength__len__ = nameLength__in_len__;
_err = FSMakeFSRefUnicode(&_self->ob_itself,
nameLength__len__, nameLength__in__,
textEncodingHint,
&newRef);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FSRef_New, &newRef);
return _res;
}
static PyObject *FSRef_FSCompareFSRefs(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef ref2;
if (!PyArg_ParseTuple(_args, "O&",
FSRef_Convert, &ref2))
return NULL;
_err = FSCompareFSRefs(&_self->ob_itself,
&ref2);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSCreateFileUnicode(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *nameLength__in__;
UniCharCount nameLength__len__;
int nameLength__in_len__;
FSCatalogInfoBitmap whichInfo;
FSCatalogInfo catalogInfo;
FSRef newRef;
FSSpec newSpec;
if (!PyArg_ParseTuple(_args, "u#lO&",
&nameLength__in__, &nameLength__in_len__,
&whichInfo,
FSCatalogInfo_Convert, &catalogInfo))
return NULL;
nameLength__len__ = nameLength__in_len__;
_err = FSCreateFileUnicode(&_self->ob_itself,
nameLength__len__, nameLength__in__,
whichInfo,
&catalogInfo,
&newRef,
&newSpec);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&O&",
FSRef_New, &newRef,
FSSpec_New, &newSpec);
return _res;
}
static PyObject *FSRef_FSCreateDirectoryUnicode(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *nameLength__in__;
UniCharCount nameLength__len__;
int nameLength__in_len__;
FSCatalogInfoBitmap whichInfo;
FSCatalogInfo catalogInfo;
FSRef newRef;
FSSpec newSpec;
UInt32 newDirID;
if (!PyArg_ParseTuple(_args, "u#lO&",
&nameLength__in__, &nameLength__in_len__,
&whichInfo,
FSCatalogInfo_Convert, &catalogInfo))
return NULL;
nameLength__len__ = nameLength__in_len__;
_err = FSCreateDirectoryUnicode(&_self->ob_itself,
nameLength__len__, nameLength__in__,
whichInfo,
&catalogInfo,
&newRef,
&newSpec,
&newDirID);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&O&l",
FSRef_New, &newRef,
FSSpec_New, &newSpec,
newDirID);
return _res;
}
static PyObject *FSRef_FSDeleteObject(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSDeleteObject(&_self->ob_itself);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSMoveObject(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef destDirectory;
FSRef newRef;
if (!PyArg_ParseTuple(_args, "O&",
FSRef_Convert, &destDirectory))
return NULL;
_err = FSMoveObject(&_self->ob_itself,
&destDirectory,
&newRef);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FSRef_New, &newRef);
return _res;
}
static PyObject *FSRef_FSExchangeObjects(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef destRef;
if (!PyArg_ParseTuple(_args, "O&",
FSRef_Convert, &destRef))
return NULL;
_err = FSExchangeObjects(&_self->ob_itself,
&destRef);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSRenameUnicode(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *nameLength__in__;
UniCharCount nameLength__len__;
int nameLength__in_len__;
TextEncoding textEncodingHint;
FSRef newRef;
if (!PyArg_ParseTuple(_args, "u#l",
&nameLength__in__, &nameLength__in_len__,
&textEncodingHint))
return NULL;
nameLength__len__ = nameLength__in_len__;
_err = FSRenameUnicode(&_self->ob_itself,
nameLength__len__, nameLength__in__,
textEncodingHint,
&newRef);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FSRef_New, &newRef);
return _res;
}
static PyObject *FSRef_FSGetCatalogInfo(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSCatalogInfoBitmap whichInfo;
FSCatalogInfo catalogInfo;
HFSUniStr255 outName;
FSSpec fsSpec;
FSRef parentRef;
if (!PyArg_ParseTuple(_args, "l",
&whichInfo))
return NULL;
_err = FSGetCatalogInfo(&_self->ob_itself,
whichInfo,
&catalogInfo,
&outName,
&fsSpec,
&parentRef);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&O&O&O&",
FSCatalogInfo_New, &catalogInfo,
PyMac_BuildHFSUniStr255, &outName,
FSSpec_New, &fsSpec,
FSRef_New, &parentRef);
return _res;
}
static PyObject *FSRef_FSSetCatalogInfo(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSCatalogInfoBitmap whichInfo;
FSCatalogInfo catalogInfo;
if (!PyArg_ParseTuple(_args, "lO&",
&whichInfo,
FSCatalogInfo_Convert, &catalogInfo))
return NULL;
_err = FSSetCatalogInfo(&_self->ob_itself,
whichInfo,
&catalogInfo);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSCreateFork(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *forkNameLength__in__;
UniCharCount forkNameLength__len__;
int forkNameLength__in_len__;
if (!PyArg_ParseTuple(_args, "u#",
&forkNameLength__in__, &forkNameLength__in_len__))
return NULL;
forkNameLength__len__ = forkNameLength__in_len__;
_err = FSCreateFork(&_self->ob_itself,
forkNameLength__len__, forkNameLength__in__);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSDeleteFork(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *forkNameLength__in__;
UniCharCount forkNameLength__len__;
int forkNameLength__in_len__;
if (!PyArg_ParseTuple(_args, "u#",
&forkNameLength__in__, &forkNameLength__in_len__))
return NULL;
forkNameLength__len__ = forkNameLength__in_len__;
_err = FSDeleteFork(&_self->ob_itself,
forkNameLength__len__, forkNameLength__in__);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *FSRef_FSOpenFork(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
UniChar *forkNameLength__in__;
UniCharCount forkNameLength__len__;
int forkNameLength__in_len__;
SInt8 permissions;
SInt16 forkRefNum;
if (!PyArg_ParseTuple(_args, "u#b",
&forkNameLength__in__, &forkNameLength__in_len__,
&permissions))
return NULL;
forkNameLength__len__ = forkNameLength__in_len__;
_err = FSOpenFork(&_self->ob_itself,
forkNameLength__len__, forkNameLength__in__,
permissions,
&forkRefNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
forkRefNum);
return _res;
}
#if TARGET_API_MAC_OSX
static PyObject *FSRef_FNNotify(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSStatus _err;
FNMessage message;
OptionBits flags;
if (!PyArg_ParseTuple(_args, "ll",
&message,
&flags))
return NULL;
_err = FNNotify(&_self->ob_itself,
message,
flags);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
#endif
static PyObject *FSRef_FSNewAliasMinimal(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
AliasHandle inAlias;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSNewAliasMinimal(&_self->ob_itself,
&inAlias);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
Alias_New, inAlias);
return _res;
}
static PyObject *FSRef_FSIsAliasFile(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Boolean aliasFileFlag;
Boolean folderFlag;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSIsAliasFile(&_self->ob_itself,
&aliasFileFlag,
&folderFlag);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("bb",
aliasFileFlag,
folderFlag);
return _res;
}
static PyObject *FSRef_FSRefMakePath(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSStatus _err;
#define MAXPATHNAME 1024
UInt8 path[MAXPATHNAME];
UInt32 maxPathSize = MAXPATHNAME;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSRefMakePath(&_self->ob_itself,
path,
maxPathSize);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("s", path);
return _res;
}
static PyObject *FSRef_as_pathname(FSRefObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
#if TARGET_API_MAC_OSX
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_res = FSRef_FSRefMakePath(_self, _args);
#else
char strbuf[1024];
OSErr err;
FSSpec fss;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
if ( !PyMac_GetFSSpec((PyObject *)_self, &fss))
return NULL;
err = PyMac_GetFullPathname(&fss, strbuf, sizeof(strbuf));
if ( err ) {
PyMac_Error(err);
return NULL;
}
_res = PyString_FromString(strbuf);
#endif
return _res;
}
static PyMethodDef FSRef_methods[] = {
{"FSMakeFSRefUnicode", (PyCFunction)FSRef_FSMakeFSRefUnicode, 1,
PyDoc_STR("(Buffer nameLength, TextEncoding textEncodingHint) -> (FSRef newRef)")},
{"FSCompareFSRefs", (PyCFunction)FSRef_FSCompareFSRefs, 1,
PyDoc_STR("(FSRef ref2) -> None")},
{"FSCreateFileUnicode", (PyCFunction)FSRef_FSCreateFileUnicode, 1,
PyDoc_STR("(Buffer nameLength, FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> (FSRef newRef, FSSpec newSpec)")},
{"FSCreateDirectoryUnicode", (PyCFunction)FSRef_FSCreateDirectoryUnicode, 1,
PyDoc_STR("(Buffer nameLength, FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> (FSRef newRef, FSSpec newSpec, UInt32 newDirID)")},
{"FSDeleteObject", (PyCFunction)FSRef_FSDeleteObject, 1,
PyDoc_STR("() -> None")},
{"FSMoveObject", (PyCFunction)FSRef_FSMoveObject, 1,
PyDoc_STR("(FSRef destDirectory) -> (FSRef newRef)")},
{"FSExchangeObjects", (PyCFunction)FSRef_FSExchangeObjects, 1,
PyDoc_STR("(FSRef destRef) -> None")},
{"FSRenameUnicode", (PyCFunction)FSRef_FSRenameUnicode, 1,
PyDoc_STR("(Buffer nameLength, TextEncoding textEncodingHint) -> (FSRef newRef)")},
{"FSGetCatalogInfo", (PyCFunction)FSRef_FSGetCatalogInfo, 1,
PyDoc_STR("(FSCatalogInfoBitmap whichInfo) -> (FSCatalogInfo catalogInfo, HFSUniStr255 outName, FSSpec fsSpec, FSRef parentRef)")},
{"FSSetCatalogInfo", (PyCFunction)FSRef_FSSetCatalogInfo, 1,
PyDoc_STR("(FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> None")},
{"FSCreateFork", (PyCFunction)FSRef_FSCreateFork, 1,
PyDoc_STR("(Buffer forkNameLength) -> None")},
{"FSDeleteFork", (PyCFunction)FSRef_FSDeleteFork, 1,
PyDoc_STR("(Buffer forkNameLength) -> None")},
{"FSOpenFork", (PyCFunction)FSRef_FSOpenFork, 1,
PyDoc_STR("(Buffer forkNameLength, SInt8 permissions) -> (SInt16 forkRefNum)")},
#if TARGET_API_MAC_OSX
{"FNNotify", (PyCFunction)FSRef_FNNotify, 1,
PyDoc_STR("(FNMessage message, OptionBits flags) -> None")},
#endif
{"FSNewAliasMinimal", (PyCFunction)FSRef_FSNewAliasMinimal, 1,
PyDoc_STR("() -> (AliasHandle inAlias)")},
{"FSIsAliasFile", (PyCFunction)FSRef_FSIsAliasFile, 1,
PyDoc_STR("() -> (Boolean aliasFileFlag, Boolean folderFlag)")},
{"FSRefMakePath", (PyCFunction)FSRef_FSRefMakePath, 1,
PyDoc_STR("() -> string")},
{"as_pathname", (PyCFunction)FSRef_as_pathname, 1,
PyDoc_STR("() -> string")},
{NULL, NULL, 0}
};
static PyObject *FSRef_get_data(FSRefObject *self, void *closure)
{
return PyString_FromStringAndSize((char *)&self->ob_itself, sizeof(self->ob_itself));
}
#define FSRef_set_data NULL
static PyGetSetDef FSRef_getsetlist[] = {
{"data", (getter)FSRef_get_data, (setter)FSRef_set_data, "Raw data of the FSRef object"},
{NULL, NULL, NULL, NULL},
};
#define FSRef_compare NULL
#define FSRef_repr NULL
#define FSRef_hash NULL
static int FSRef_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
{
PyObject *v = NULL;
char *rawdata = NULL;
int rawdatalen = 0;
static char *kw[] = {"itself", "rawdata", 0};
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|Os#", kw, &v, &rawdata, &rawdatalen))
return -1;
if (v && rawdata)
{
PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
return -1;
}
if (!v && !rawdata)
{
PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
return -1;
}
if (rawdata)
{
if (rawdatalen != sizeof(FSRef))
{
PyErr_SetString(PyExc_TypeError, "FSRef rawdata incorrect size");
return -1;
}
memcpy(&((FSRefObject *)self)->ob_itself, rawdata, rawdatalen);
return 0;
}
if (PyMac_GetFSRef(v, &((FSRefObject *)self)->ob_itself)) return 0;
return -1;
}
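/* Editorial sketch (not generated code): FSRef_tp_init above accepts exactly one
 * of the "itself" or "rawdata" keyword arguments, so from Python a
 * Carbon.File.FSRef would typically be built in one of two ways:
 *
 *     ref = Carbon.File.FSRef("/tmp")            # via PyMac_GetFSRef, pathname form
 *     ref = Carbon.File.FSRef(rawdata=ref.data)  # round-trip through the raw FSRef bytes
 *
 * The pathname form assumes an OSX build, where PyMac_GetFSRef accepts strings.
 */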
#define FSRef_tp_alloc PyType_GenericAlloc
static PyObject *FSRef_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *self;
if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
memset(&((FSRefObject *)self)->ob_itself, 0, sizeof(FSRef));
return self;
}
#define FSRef_tp_free PyObject_Del
static PyTypeObject FSRef_Type = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
"Carbon.File.FSRef", /*tp_name*/
sizeof(FSRefObject), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) FSRef_dealloc, /*tp_dealloc*/
0, /*tp_print*/
(getattrfunc)0, /*tp_getattr*/
(setattrfunc)0, /*tp_setattr*/
(cmpfunc) FSRef_compare, /*tp_compare*/
(reprfunc) FSRef_repr, /*tp_repr*/
(PyNumberMethods *)0, /* tp_as_number */
(PySequenceMethods *)0, /* tp_as_sequence */
(PyMappingMethods *)0, /* tp_as_mapping */
(hashfunc) FSRef_hash, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
PyObject_GenericGetAttr, /*tp_getattro*/
PyObject_GenericSetAttr, /*tp_setattro */
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
FSRef_methods, /* tp_methods */
0, /*tp_members*/
FSRef_getsetlist, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
FSRef_tp_init, /* tp_init */
FSRef_tp_alloc, /* tp_alloc */
FSRef_tp_new, /* tp_new */
FSRef_tp_free, /* tp_free */
};
/* --------------------- End object type FSRef ---------------------- */
static PyObject *File_UnmountVol(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Str63 volName;
short vRefNum;
if (!PyArg_ParseTuple(_args, "O&h",
PyMac_GetStr255, volName,
&vRefNum))
return NULL;
_err = UnmountVol(volName,
vRefNum);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FlushVol(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Str63 volName;
short vRefNum;
if (!PyArg_ParseTuple(_args, "O&h",
PyMac_GetStr255, volName,
&vRefNum))
return NULL;
_err = FlushVol(volName,
vRefNum);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_HSetVol(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
Str63 volName;
short vRefNum;
long dirID;
if (!PyArg_ParseTuple(_args, "O&hl",
PyMac_GetStr255, volName,
&vRefNum,
&dirID))
return NULL;
_err = HSetVol(volName,
vRefNum,
dirID);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSClose(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
if (!PyArg_ParseTuple(_args, "h",
&refNum))
return NULL;
_err = FSClose(refNum);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_Allocate(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
long count;
if (!PyArg_ParseTuple(_args, "h",
&refNum))
return NULL;
_err = Allocate(refNum,
&count);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
count);
return _res;
}
static PyObject *File_GetEOF(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
long logEOF;
if (!PyArg_ParseTuple(_args, "h",
&refNum))
return NULL;
_err = GetEOF(refNum,
&logEOF);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
logEOF);
return _res;
}
static PyObject *File_SetEOF(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
long logEOF;
if (!PyArg_ParseTuple(_args, "hl",
&refNum,
&logEOF))
return NULL;
_err = SetEOF(refNum,
logEOF);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_GetFPos(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
long filePos;
if (!PyArg_ParseTuple(_args, "h",
&refNum))
return NULL;
_err = GetFPos(refNum,
&filePos);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
filePos);
return _res;
}
static PyObject *File_SetFPos(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
short posMode;
long posOff;
if (!PyArg_ParseTuple(_args, "hhl",
&refNum,
&posMode,
&posOff))
return NULL;
_err = SetFPos(refNum,
posMode,
posOff);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_GetVRefNum(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short fileRefNum;
short vRefNum;
if (!PyArg_ParseTuple(_args, "h",
&fileRefNum))
return NULL;
_err = GetVRefNum(fileRefNum,
&vRefNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
vRefNum);
return _res;
}
static PyObject *File_HGetVol(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
StringPtr volName;
short vRefNum;
long dirID;
if (!PyArg_ParseTuple(_args, "O&",
PyMac_GetStr255, &volName))
return NULL;
_err = HGetVol(volName,
&vRefNum,
&dirID);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("hl",
vRefNum,
dirID);
return _res;
}
static PyObject *File_HOpen(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
SInt8 permission;
short refNum;
if (!PyArg_ParseTuple(_args, "hlO&b",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName,
&permission))
return NULL;
_err = HOpen(vRefNum,
dirID,
fileName,
permission,
&refNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
refNum);
return _res;
}
static PyObject *File_HOpenDF(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
SInt8 permission;
short refNum;
if (!PyArg_ParseTuple(_args, "hlO&b",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName,
&permission))
return NULL;
_err = HOpenDF(vRefNum,
dirID,
fileName,
permission,
&refNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
refNum);
return _res;
}
static PyObject *File_HOpenRF(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
SInt8 permission;
short refNum;
if (!PyArg_ParseTuple(_args, "hlO&b",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName,
&permission))
return NULL;
_err = HOpenRF(vRefNum,
dirID,
fileName,
permission,
&refNum);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("h",
refNum);
return _res;
}
static PyObject *File_AllocContig(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short refNum;
long count;
if (!PyArg_ParseTuple(_args, "h",
&refNum))
return NULL;
_err = AllocContig(refNum,
&count);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
count);
return _res;
}
static PyObject *File_HCreate(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
OSType creator;
OSType fileType;
if (!PyArg_ParseTuple(_args, "hlO&O&O&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName,
PyMac_GetOSType, &creator,
PyMac_GetOSType, &fileType))
return NULL;
_err = HCreate(vRefNum,
dirID,
fileName,
creator,
fileType);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_DirCreate(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long parentDirID;
Str255 directoryName;
long createdDirID;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&parentDirID,
PyMac_GetStr255, directoryName))
return NULL;
_err = DirCreate(vRefNum,
parentDirID,
directoryName,
&createdDirID);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("l",
createdDirID);
return _res;
}
static PyObject *File_HDelete(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName))
return NULL;
_err = HDelete(vRefNum,
dirID,
fileName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_HGetFInfo(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
FInfo fndrInfo;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName))
return NULL;
_err = HGetFInfo(vRefNum,
dirID,
fileName,
&fndrInfo);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FInfo_New, &fndrInfo);
return _res;
}
static PyObject *File_HSetFInfo(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
FInfo fndrInfo;
if (!PyArg_ParseTuple(_args, "hlO&O&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName,
FInfo_Convert, &fndrInfo))
return NULL;
_err = HSetFInfo(vRefNum,
dirID,
fileName,
&fndrInfo);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_HSetFLock(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName))
return NULL;
_err = HSetFLock(vRefNum,
dirID,
fileName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_HRstFLock(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName))
return NULL;
_err = HRstFLock(vRefNum,
dirID,
fileName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_HRename(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 oldName;
Str255 newName;
if (!PyArg_ParseTuple(_args, "hlO&O&",
&vRefNum,
&dirID,
PyMac_GetStr255, oldName,
PyMac_GetStr255, newName))
return NULL;
_err = HRename(vRefNum,
dirID,
oldName,
newName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_CatMove(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 oldName;
long newDirID;
Str255 newName;
if (!PyArg_ParseTuple(_args, "hlO&lO&",
&vRefNum,
&dirID,
PyMac_GetStr255, oldName,
&newDirID,
PyMac_GetStr255, newName))
return NULL;
_err = CatMove(vRefNum,
dirID,
oldName,
newDirID,
newName);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSMakeFSSpec(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
short vRefNum;
long dirID;
Str255 fileName;
FSSpec spec;
if (!PyArg_ParseTuple(_args, "hlO&",
&vRefNum,
&dirID,
PyMac_GetStr255, fileName))
return NULL;
_err = FSMakeFSSpec(vRefNum,
dirID,
fileName,
&spec);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
FSSpec_New, &spec);
return _res;
}
static PyObject *File_FSGetForkPosition(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
SInt64 position;
if (!PyArg_ParseTuple(_args, "h",
&forkRefNum))
return NULL;
_err = FSGetForkPosition(forkRefNum,
&position);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("L",
position);
return _res;
}
static PyObject *File_FSSetForkPosition(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
UInt16 positionMode;
SInt64 positionOffset;
if (!PyArg_ParseTuple(_args, "hHL",
&forkRefNum,
&positionMode,
&positionOffset))
return NULL;
_err = FSSetForkPosition(forkRefNum,
positionMode,
positionOffset);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSGetForkSize(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
SInt64 forkSize;
if (!PyArg_ParseTuple(_args, "h",
&forkRefNum))
return NULL;
_err = FSGetForkSize(forkRefNum,
&forkSize);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("L",
forkSize);
return _res;
}
static PyObject *File_FSSetForkSize(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
UInt16 positionMode;
SInt64 positionOffset;
if (!PyArg_ParseTuple(_args, "hHL",
&forkRefNum,
&positionMode,
&positionOffset))
return NULL;
_err = FSSetForkSize(forkRefNum,
positionMode,
positionOffset);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSAllocateFork(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
FSAllocationFlags flags;
UInt16 positionMode;
SInt64 positionOffset;
UInt64 requestCount;
UInt64 actualCount;
if (!PyArg_ParseTuple(_args, "hHHLL",
&forkRefNum,
&flags,
&positionMode,
&positionOffset,
&requestCount))
return NULL;
_err = FSAllocateFork(forkRefNum,
flags,
positionMode,
positionOffset,
requestCount,
&actualCount);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("L",
actualCount);
return _res;
}
static PyObject *File_FSFlushFork(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
if (!PyArg_ParseTuple(_args, "h",
&forkRefNum))
return NULL;
_err = FSFlushFork(forkRefNum);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSCloseFork(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
SInt16 forkRefNum;
if (!PyArg_ParseTuple(_args, "h",
&forkRefNum))
return NULL;
_err = FSCloseFork(forkRefNum);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
static PyObject *File_FSGetDataForkName(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
HFSUniStr255 dataForkName;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSGetDataForkName(&dataForkName);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
PyMac_BuildHFSUniStr255, &dataForkName);
return _res;
}
static PyObject *File_FSGetResourceForkName(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
HFSUniStr255 resourceForkName;
if (!PyArg_ParseTuple(_args, ""))
return NULL;
_err = FSGetResourceForkName(&resourceForkName);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
PyMac_BuildHFSUniStr255, &resourceForkName);
return _res;
}
static PyObject *File_FSPathMakeRef(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSStatus _err;
UInt8 * path;
FSRef ref;
Boolean isDirectory;
if (!PyArg_ParseTuple(_args, "s",
&path))
return NULL;
_err = FSPathMakeRef(path,
&ref,
&isDirectory);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&b",
FSRef_New, &ref,
isDirectory);
return _res;
}
#if TARGET_API_MAC_OSX
static PyObject *File_FNNotifyByPath(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSStatus _err;
UInt8 * path;
FNMessage message;
OptionBits flags;
if (!PyArg_ParseTuple(_args, "sll",
&path,
&message,
&flags))
return NULL;
_err = FNNotifyByPath(path,
message,
flags);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
#endif
#if TARGET_API_MAC_OSX
static PyObject *File_FNNotifyAll(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSStatus _err;
FNMessage message;
OptionBits flags;
if (!PyArg_ParseTuple(_args, "ll",
&message,
&flags))
return NULL;
_err = FNNotifyAll(message,
flags);
if (_err != noErr) return PyMac_Error(_err);
Py_INCREF(Py_None);
_res = Py_None;
return _res;
}
#endif
static PyObject *File_NewAlias(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec fromFile__buf__;
FSSpec *fromFile = &fromFile__buf__;
FSSpec target;
AliasHandle alias;
if (!PyArg_ParseTuple(_args, "O&O&",
myPyMac_GetOptFSSpecPtr, &fromFile,
FSSpec_Convert, &target))
return NULL;
_err = NewAlias(fromFile,
&target,
&alias);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
Alias_New, alias);
return _res;
}
static PyObject *File_NewAliasMinimalFromFullPath(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
char *fullPath__in__;
int fullPath__len__;
int fullPath__in_len__;
Str32 zoneName;
Str31 serverName;
AliasHandle alias;
if (!PyArg_ParseTuple(_args, "s#O&O&",
&fullPath__in__, &fullPath__in_len__,
PyMac_GetStr255, zoneName,
PyMac_GetStr255, serverName))
return NULL;
fullPath__len__ = fullPath__in_len__;
_err = NewAliasMinimalFromFullPath(fullPath__len__, fullPath__in__,
zoneName,
serverName,
&alias);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
Alias_New, alias);
return _res;
}
static PyObject *File_ResolveAliasFile(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec theSpec;
Boolean resolveAliasChains;
Boolean targetIsFolder;
Boolean wasAliased;
if (!PyArg_ParseTuple(_args, "O&b",
FSSpec_Convert, &theSpec,
&resolveAliasChains))
return NULL;
_err = ResolveAliasFile(&theSpec,
resolveAliasChains,
&targetIsFolder,
&wasAliased);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&bb",
FSSpec_New, &theSpec,
targetIsFolder,
wasAliased);
return _res;
}
static PyObject *File_ResolveAliasFileWithMountFlags(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec theSpec;
Boolean resolveAliasChains;
Boolean targetIsFolder;
Boolean wasAliased;
unsigned long mountFlags;
if (!PyArg_ParseTuple(_args, "O&bl",
FSSpec_Convert, &theSpec,
&resolveAliasChains,
&mountFlags))
return NULL;
_err = ResolveAliasFileWithMountFlags(&theSpec,
resolveAliasChains,
&targetIsFolder,
&wasAliased,
mountFlags);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&bb",
FSSpec_New, &theSpec,
targetIsFolder,
wasAliased);
return _res;
}
static PyObject *File_UpdateAlias(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec fromFile__buf__;
FSSpec *fromFile = &fromFile__buf__;
FSSpec target;
AliasHandle alias;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "O&O&O&",
myPyMac_GetOptFSSpecPtr, &fromFile,
FSSpec_Convert, &target,
Alias_Convert, &alias))
return NULL;
_err = UpdateAlias(fromFile,
&target,
alias,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("b",
wasChanged);
return _res;
}
static PyObject *File_ResolveAliasFileWithMountFlagsNoUI(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSSpec theSpec;
Boolean resolveAliasChains;
Boolean targetIsFolder;
Boolean wasAliased;
unsigned long mountFlags;
if (!PyArg_ParseTuple(_args, "O&bl",
FSSpec_Convert, &theSpec,
&resolveAliasChains,
&mountFlags))
return NULL;
_err = ResolveAliasFileWithMountFlagsNoUI(&theSpec,
resolveAliasChains,
&targetIsFolder,
&wasAliased,
mountFlags);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&bb",
FSSpec_New, &theSpec,
targetIsFolder,
wasAliased);
return _res;
}
static PyObject *File_FSNewAlias(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef fromFile__buf__;
FSRef *fromFile = &fromFile__buf__;
FSRef target;
AliasHandle inAlias;
if (!PyArg_ParseTuple(_args, "O&O&",
myPyMac_GetOptFSRefPtr, &fromFile,
FSRef_Convert, &target))
return NULL;
_err = FSNewAlias(fromFile,
&target,
&inAlias);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
Alias_New, inAlias);
return _res;
}
static PyObject *File_FSResolveAliasFileWithMountFlags(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef theRef;
Boolean resolveAliasChains;
Boolean targetIsFolder;
Boolean wasAliased;
unsigned long mountFlags;
if (!PyArg_ParseTuple(_args, "O&bl",
FSRef_Convert, &theRef,
&resolveAliasChains,
&mountFlags))
return NULL;
_err = FSResolveAliasFileWithMountFlags(&theRef,
resolveAliasChains,
&targetIsFolder,
&wasAliased,
mountFlags);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&bb",
FSRef_New, &theRef,
targetIsFolder,
wasAliased);
return _res;
}
static PyObject *File_FSResolveAliasFile(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef theRef;
Boolean resolveAliasChains;
Boolean targetIsFolder;
Boolean wasAliased;
if (!PyArg_ParseTuple(_args, "O&b",
FSRef_Convert, &theRef,
&resolveAliasChains))
return NULL;
_err = FSResolveAliasFile(&theRef,
resolveAliasChains,
&targetIsFolder,
&wasAliased);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&bb",
FSRef_New, &theRef,
targetIsFolder,
wasAliased);
return _res;
}
static PyObject *File_FSUpdateAlias(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
OSErr _err;
FSRef fromFile__buf__;
FSRef *fromFile = &fromFile__buf__;
FSRef target;
AliasHandle alias;
Boolean wasChanged;
if (!PyArg_ParseTuple(_args, "O&O&O&",
myPyMac_GetOptFSRefPtr, &fromFile,
FSRef_Convert, &target,
Alias_Convert, &alias))
return NULL;
_err = FSUpdateAlias(fromFile,
&target,
alias,
&wasChanged);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("b",
wasChanged);
return _res;
}
static PyObject *File_pathname(PyObject *_self, PyObject *_args)
{
PyObject *_res = NULL;
PyObject *obj;
if (!PyArg_ParseTuple(_args, "O", &obj))
return NULL;
if (PyString_Check(obj)) {
Py_INCREF(obj);
return obj;
}
if (PyUnicode_Check(obj))
return PyUnicode_AsEncodedString(obj, "utf8", "strict");
_res = PyObject_CallMethod(obj, "as_pathname", NULL);
return _res;
}
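/* Editorial sketch: File_pathname above normalises several argument types to a
 * pathname string, roughly:
 *
 *     pathname("/tmp")          -> "/tmp"              (plain strings pass through)
 *     pathname(u"/tmp")         -> "/tmp"              (unicode is encoded as UTF-8)
 *     pathname(fsspec_or_fsref) -> obj.as_pathname()   (delegates to the object)
 */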
static PyMethodDef File_methods[] = {
{"UnmountVol", (PyCFunction)File_UnmountVol, 1,
PyDoc_STR("(Str63 volName, short vRefNum) -> None")},
{"FlushVol", (PyCFunction)File_FlushVol, 1,
PyDoc_STR("(Str63 volName, short vRefNum) -> None")},
{"HSetVol", (PyCFunction)File_HSetVol, 1,
PyDoc_STR("(Str63 volName, short vRefNum, long dirID) -> None")},
{"FSClose", (PyCFunction)File_FSClose, 1,
PyDoc_STR("(short refNum) -> None")},
{"Allocate", (PyCFunction)File_Allocate, 1,
PyDoc_STR("(short refNum) -> (long count)")},
{"GetEOF", (PyCFunction)File_GetEOF, 1,
PyDoc_STR("(short refNum) -> (long logEOF)")},
{"SetEOF", (PyCFunction)File_SetEOF, 1,
PyDoc_STR("(short refNum, long logEOF) -> None")},
{"GetFPos", (PyCFunction)File_GetFPos, 1,
PyDoc_STR("(short refNum) -> (long filePos)")},
{"SetFPos", (PyCFunction)File_SetFPos, 1,
PyDoc_STR("(short refNum, short posMode, long posOff) -> None")},
{"GetVRefNum", (PyCFunction)File_GetVRefNum, 1,
PyDoc_STR("(short fileRefNum) -> (short vRefNum)")},
{"HGetVol", (PyCFunction)File_HGetVol, 1,
PyDoc_STR("(StringPtr volName) -> (short vRefNum, long dirID)")},
{"HOpen", (PyCFunction)File_HOpen, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
{"HOpenDF", (PyCFunction)File_HOpenDF, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
{"HOpenRF", (PyCFunction)File_HOpenRF, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
{"AllocContig", (PyCFunction)File_AllocContig, 1,
PyDoc_STR("(short refNum) -> (long count)")},
{"HCreate", (PyCFunction)File_HCreate, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, OSType creator, OSType fileType) -> None")},
{"DirCreate", (PyCFunction)File_DirCreate, 1,
PyDoc_STR("(short vRefNum, long parentDirID, Str255 directoryName) -> (long createdDirID)")},
{"HDelete", (PyCFunction)File_HDelete, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
{"HGetFInfo", (PyCFunction)File_HGetFInfo, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> (FInfo fndrInfo)")},
{"HSetFInfo", (PyCFunction)File_HSetFInfo, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, FInfo fndrInfo) -> None")},
{"HSetFLock", (PyCFunction)File_HSetFLock, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
{"HRstFLock", (PyCFunction)File_HRstFLock, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
{"HRename", (PyCFunction)File_HRename, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 oldName, Str255 newName) -> None")},
{"CatMove", (PyCFunction)File_CatMove, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 oldName, long newDirID, Str255 newName) -> None")},
{"FSMakeFSSpec", (PyCFunction)File_FSMakeFSSpec, 1,
PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> (FSSpec spec)")},
{"FSGetForkPosition", (PyCFunction)File_FSGetForkPosition, 1,
PyDoc_STR("(SInt16 forkRefNum) -> (SInt64 position)")},
{"FSSetForkPosition", (PyCFunction)File_FSSetForkPosition, 1,
PyDoc_STR("(SInt16 forkRefNum, UInt16 positionMode, SInt64 positionOffset) -> None")},
{"FSGetForkSize", (PyCFunction)File_FSGetForkSize, 1,
PyDoc_STR("(SInt16 forkRefNum) -> (SInt64 forkSize)")},
{"FSSetForkSize", (PyCFunction)File_FSSetForkSize, 1,
PyDoc_STR("(SInt16 forkRefNum, UInt16 positionMode, SInt64 positionOffset) -> None")},
{"FSAllocateFork", (PyCFunction)File_FSAllocateFork, 1,
PyDoc_STR("(SInt16 forkRefNum, FSAllocationFlags flags, UInt16 positionMode, SInt64 positionOffset, UInt64 requestCount) -> (UInt64 actualCount)")},
{"FSFlushFork", (PyCFunction)File_FSFlushFork, 1,
PyDoc_STR("(SInt16 forkRefNum) -> None")},
{"FSCloseFork", (PyCFunction)File_FSCloseFork, 1,
PyDoc_STR("(SInt16 forkRefNum) -> None")},
{"FSGetDataForkName", (PyCFunction)File_FSGetDataForkName, 1,
PyDoc_STR("() -> (HFSUniStr255 dataForkName)")},
{"FSGetResourceForkName", (PyCFunction)File_FSGetResourceForkName, 1,
PyDoc_STR("() -> (HFSUniStr255 resourceForkName)")},
{"FSPathMakeRef", (PyCFunction)File_FSPathMakeRef, 1,
PyDoc_STR("(UInt8 * path) -> (FSRef ref, Boolean isDirectory)")},
#if TARGET_API_MAC_OSX
{"FNNotifyByPath", (PyCFunction)File_FNNotifyByPath, 1,
PyDoc_STR("(UInt8 * path, FNMessage message, OptionBits flags) -> None")},
#endif
#if TARGET_API_MAC_OSX
{"FNNotifyAll", (PyCFunction)File_FNNotifyAll, 1,
PyDoc_STR("(FNMessage message, OptionBits flags) -> None")},
#endif
{"NewAlias", (PyCFunction)File_NewAlias, 1,
PyDoc_STR("(FSSpec fromFile, FSSpec target) -> (AliasHandle alias)")},
{"NewAliasMinimalFromFullPath", (PyCFunction)File_NewAliasMinimalFromFullPath, 1,
PyDoc_STR("(Buffer fullPath, Str32 zoneName, Str31 serverName) -> (AliasHandle alias)")},
{"ResolveAliasFile", (PyCFunction)File_ResolveAliasFile, 1,
PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
{"ResolveAliasFileWithMountFlags", (PyCFunction)File_ResolveAliasFileWithMountFlags, 1,
PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
{"UpdateAlias", (PyCFunction)File_UpdateAlias, 1,
PyDoc_STR("(FSSpec fromFile, FSSpec target, AliasHandle alias) -> (Boolean wasChanged)")},
{"ResolveAliasFileWithMountFlagsNoUI", (PyCFunction)File_ResolveAliasFileWithMountFlagsNoUI, 1,
PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
{"FSNewAlias", (PyCFunction)File_FSNewAlias, 1,
PyDoc_STR("(FSRef fromFile, FSRef target) -> (AliasHandle inAlias)")},
{"FSResolveAliasFileWithMountFlags", (PyCFunction)File_FSResolveAliasFileWithMountFlags, 1,
PyDoc_STR("(FSRef theRef, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased)")},
{"FSResolveAliasFile", (PyCFunction)File_FSResolveAliasFile, 1,
PyDoc_STR("(FSRef theRef, Boolean resolveAliasChains) -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased)")},
{"FSUpdateAlias", (PyCFunction)File_FSUpdateAlias, 1,
PyDoc_STR("(FSRef fromFile, FSRef target, AliasHandle alias) -> (Boolean wasChanged)")},
{"pathname", (PyCFunction)File_pathname, 1,
PyDoc_STR("(str|unicode|FSSpec|FSref) -> pathname")},
{NULL, NULL, 0}
};
int
PyMac_GetFSSpec(PyObject *v, FSSpec *spec)
{
Str255 path;
short refnum;
long parid;
OSErr err;
FSRef fsr;
if (FSSpec_Check(v)) {
*spec = ((FSSpecObject *)v)->ob_itself;
return 1;
}
if (PyArg_Parse(v, "(hlO&)",
&refnum, &parid, PyMac_GetStr255, &path)) {
err = FSMakeFSSpec(refnum, parid, path, spec);
if ( err && err != fnfErr ) {
PyMac_Error(err);
return 0;
}
return 1;
}
PyErr_Clear();
#if !TARGET_API_MAC_OSX
/* On OS9 we now try a pathname */
if ( PyString_Check(v) ) {
/* It's a pathname */
if( !PyArg_Parse(v, "O&", PyMac_GetStr255, &path) )
return 0;
refnum = 0; /* XXXX Should get CurWD here?? */
parid = 0;
err = FSMakeFSSpec(refnum, parid, path, spec);
if ( err && err != fnfErr ) {
PyMac_Error(err);
return 0;
}
return 1;
}
PyErr_Clear();
#endif
/* Otherwise we try to go via an FSRef. On OSX we go all the way,
** on OS9 we accept only a real FSRef object
*/
#if TARGET_API_MAC_OSX
if ( PyMac_GetFSRef(v, &fsr) ) {
#else
if (FSRef_Check(v)) {
fsr = ((FSRefObject *)v)->ob_itself;
#endif
err = FSGetCatalogInfo(&fsr, kFSCatInfoNone, NULL, NULL, spec, NULL);
if (err != noErr) {
PyMac_Error(err);
return 0;
}
return 1;
}
#if !TARGET_API_MAC_OSX
PyErr_SetString(PyExc_TypeError, "FSSpec, FSRef, pathname or (refnum, parid, path) required");
#endif
return 0;
}
int
PyMac_GetFSRef(PyObject *v, FSRef *fsr)
{
OSStatus err;
FSSpec fss;
if (FSRef_Check(v)) {
*fsr = ((FSRefObject *)v)->ob_itself;
return 1;
}
#if TARGET_API_MAC_OSX
/* On OSX we now try a pathname */
if ( PyString_Check(v) || PyUnicode_Check(v)) {
char *path = NULL;
if (!PyArg_Parse(v, "et", Py_FileSystemDefaultEncoding, &path))
return 0;
if ( (err=FSPathMakeRef((unsigned char *)path, fsr, NULL)) ) {
PyMac_Error(err);
return 0;
}
return 1;
}
/* XXXX Should try unicode here too */
#endif
/* Otherwise we try to go via an FSSpec */
#if TARGET_API_MAC_OSX
if (FSSpec_Check(v)) {
fss = ((FSSpecObject *)v)->ob_itself;
#else
if (PyMac_GetFSSpec(v, &fss)) {
#endif
if ((err=FSpMakeFSRef(&fss, fsr)) == 0)
return 1;
PyMac_Error(err);
return 0;
}
PyErr_SetString(PyExc_TypeError, "FSRef, FSSpec or pathname required");
return 0;
}
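/* Editorial note: PyMac_GetFSSpec and PyMac_GetFSRef above are the "O&"
 * converters used throughout this module.  Each accepts its own object type
 * directly and otherwise falls back on the other representation (and, where the
 * platform allows, on a pathname).  A sketch of the FSRef cascade:
 *
 *     FSRef object   -> copied as-is
 *     str / unicode  -> FSPathMakeRef(path, ...)        (OSX only)
 *     FSSpec (etc.)  -> FSpMakeFSRef(&fss, fsr)
 */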
extern PyObject *
PyMac_BuildFSSpec(FSSpec *spec)
{
return FSSpec_New(spec);
}
extern PyObject *
PyMac_BuildFSRef(FSRef *spec)
{
return FSRef_New(spec);
}
void init_File(void)
{
PyObject *m;
PyObject *d;
PyMac_INIT_TOOLBOX_OBJECT_NEW(FSSpec *, PyMac_BuildFSSpec);
PyMac_INIT_TOOLBOX_OBJECT_NEW(FSRef *, PyMac_BuildFSRef);
PyMac_INIT_TOOLBOX_OBJECT_CONVERT(FSSpec, PyMac_GetFSSpec);
PyMac_INIT_TOOLBOX_OBJECT_CONVERT(FSRef, PyMac_GetFSRef);
m = Py_InitModule("_File", File_methods);
d = PyModule_GetDict(m);
File_Error = PyMac_GetOSErrException();
if (File_Error == NULL ||
PyDict_SetItemString(d, "Error", File_Error) != 0)
return;
FSCatalogInfo_Type.ob_type = &PyType_Type;
if (PyType_Ready(&FSCatalogInfo_Type) < 0) return;
Py_INCREF(&FSCatalogInfo_Type);
PyModule_AddObject(m, "FSCatalogInfo", (PyObject *)&FSCatalogInfo_Type);
/* Backward-compatible name */
Py_INCREF(&FSCatalogInfo_Type);
PyModule_AddObject(m, "FSCatalogInfoType", (PyObject *)&FSCatalogInfo_Type);
FInfo_Type.ob_type = &PyType_Type;
if (PyType_Ready(&FInfo_Type) < 0) return;
Py_INCREF(&FInfo_Type);
PyModule_AddObject(m, "FInfo", (PyObject *)&FInfo_Type);
/* Backward-compatible name */
Py_INCREF(&FInfo_Type);
PyModule_AddObject(m, "FInfoType", (PyObject *)&FInfo_Type);
Alias_Type.ob_type = &PyType_Type;
if (PyType_Ready(&Alias_Type) < 0) return;
Py_INCREF(&Alias_Type);
PyModule_AddObject(m, "Alias", (PyObject *)&Alias_Type);
/* Backward-compatible name */
Py_INCREF(&Alias_Type);
PyModule_AddObject(m, "AliasType", (PyObject *)&Alias_Type);
FSSpec_Type.ob_type = &PyType_Type;
if (PyType_Ready(&FSSpec_Type) < 0) return;
Py_INCREF(&FSSpec_Type);
PyModule_AddObject(m, "FSSpec", (PyObject *)&FSSpec_Type);
/* Backward-compatible name */
Py_INCREF(&FSSpec_Type);
PyModule_AddObject(m, "FSSpecType", (PyObject *)&FSSpec_Type);
FSRef_Type.ob_type = &PyType_Type;
if (PyType_Ready(&FSRef_Type) < 0) return;
Py_INCREF(&FSRef_Type);
PyModule_AddObject(m, "FSRef", (PyObject *)&FSRef_Type);
/* Backward-compatible name */
Py_INCREF(&FSRef_Type);
PyModule_AddObject(m, "FSRefType", (PyObject *)&FSRef_Type);
}
/* ======================== End module _File ======================== */
#=======================================================================
#
# Python Lexical Analyser
#
# Actions for use in token specifications
#
#=======================================================================
class Action:
def same_as(self, other):
return self is other
class Return(Action):
"""
Internal Plex action which causes |value| to
be returned as the value of the associated token
"""
value = None
def __init__(self, value):
self.value = value
def perform(self, token_stream, text):
return self.value
def same_as(self, other):
return isinstance(other, Return) and self.value == other.value
def __repr__(self):
return "Return(%s)" % repr(self.value)
class Call(Action):
"""
Internal Plex action which causes a function to be called.
"""
function = None
def __init__(self, function):
self.function = function
def perform(self, token_stream, text):
return self.function(token_stream, text)
def __repr__(self):
return "Call(%s)" % self.function.__name__
def same_as(self, other):
return isinstance(other, Call) and self.function is other.function
class Begin(Action):
"""
Begin(state_name) is a Plex action which causes the Scanner to
enter the state |state_name|. See the docstring of Plex.Lexicon
for more information.
"""
state_name = None
def __init__(self, state_name):
self.state_name = state_name
def perform(self, token_stream, text):
token_stream.begin(self.state_name)
def __repr__(self):
return "Begin(%s)" % self.state_name
def same_as(self, other):
return isinstance(other, Begin) and self.state_name == other.state_name
class Ignore(Action):
"""
IGNORE is a Plex action which causes its associated token
to be ignored. See the docstring of Plex.Lexicon for more
information.
"""
def perform(self, token_stream, text):
return None
def __repr__(self):
return "IGNORE"
IGNORE = Ignore()
IGNORE.__doc__ = Ignore.__doc__
class Text(Action):
"""
TEXT is a Plex action which causes the text of a token to
be returned as the value of the token. See the docstring of
Plex.Lexicon for more information.
"""
def perform(self, token_stream, text):
return text
def __repr__(self):
return "TEXT"
TEXT = Text()
TEXT.__doc__ = Text.__doc__
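# Editorial sketch (not part of the original module): in a token specification
# each pattern is paired with one of the action kinds above -- a plain value
# (wrapped in Return), a callable (wrapped in Call), or a special action.
# Assuming the usual Plex pattern constructors Str, Any and Rep1 are in scope:
#
#     (Rep1(Any("0123456789")), 'int')                       # becomes Return('int')
#     (Str("hello"),            lambda s, text: text.upper())  # becomes Call(...)
#     (Rep1(Any(" \t")),        IGNORE)                       # matched text is skipped
#     (Rep1(Any("abc")),        TEXT)                         # matched text is the value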
#=======================================================================
#
# Python Lexical Analyser
#
# Converting NFA to DFA
#
#=======================================================================
import Machines
from Machines import LOWEST_PRIORITY
from Transitions import TransitionMap
def nfa_to_dfa(old_machine, debug = None):
"""
Given a nondeterministic Machine, return a new equivalent
Machine which is deterministic.
"""
# We build a new machine whose states correspond to sets of states
# in the old machine. Initially we add a new state corresponding to
# the epsilon-closure of each initial old state. Then we give transitions
# to each new state which are the union of all transitions out of any
# of the corresponding old states. The new state reached on a given
# character is the one corresponding to the set of states reachable
# on that character from any of the old states. As new combinations of
# old states are created, new states are added as needed until closure
# is reached.
new_machine = Machines.FastMachine()
state_map = StateMap(new_machine)
# Seed the process using the initial states of the old machine.
# Make the corresponding new states into initial states of the new
# machine with the same names.
for (key, old_state) in old_machine.initial_states.items():
new_state = state_map.old_to_new(epsilon_closure(old_state))
new_machine.make_initial_state(key, new_state)
# Tricky bit here: we add things to the end of this list while we're
# iterating over it. The iteration stops when closure is achieved.
for new_state in new_machine.states:
transitions = TransitionMap()
for old_state in state_map.new_to_old(new_state).keys():
for event, old_target_states in old_state.transitions.items():
if event and old_target_states:
transitions.add_set(event, set_epsilon_closure(old_target_states))
for event, old_states in transitions.items():
new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))
if debug:
debug.write("\n===== State Mapping =====\n")
state_map.dump(debug)
return new_machine
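# Editorial sketch: the subset construction above is driven by Lexicon roughly as
#
#     nfa = Machines.Machine()
#     ... add initial states and transitions ...
#     dfa = nfa_to_dfa(nfa)
#
# Each state of the resulting FastMachine corresponds, via StateMap, to the set
# of NFA states reachable on the same input.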
def set_epsilon_closure(state_set):
"""
Given a set of states, return the union of the epsilon
closures of its member states.
"""
result = {}
for state1 in state_set.keys():
for state2 in epsilon_closure(state1).keys():
result[state2] = 1
return result
def epsilon_closure(state):
"""
Return the set of states reachable from the given state
by epsilon moves.
"""
# Cache the result
result = state.epsilon_closure
if result is None:
result = {}
state.epsilon_closure = result
add_to_epsilon_closure(result, state)
return result
def add_to_epsilon_closure(state_set, state):
"""
Recursively add to |state_set| states reachable from the given state
by epsilon moves.
"""
if not state_set.get(state, 0):
state_set[state] = 1
state_set_2 = state.transitions.get_epsilon()
if state_set_2:
for state2 in state_set_2.keys():
add_to_epsilon_closure(state_set, state2)
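# Editorial example: with epsilon moves A -> B and B -> C (and no others),
# epsilon_closure(A) yields {A: 1, B: 1, C: 1}; the result is cached on the
# state the first time it is computed.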
class StateMap:
"""
Helper class used by nfa_to_dfa() to map back and forth between
sets of states from the old machine and states of the new machine.
"""
new_machine = None # Machine
old_to_new_dict = None # {(old_state,...) : new_state}
new_to_old_dict = None # {id(new_state) : old_state_set}
def __init__(self, new_machine):
self.new_machine = new_machine
self.old_to_new_dict = {}
self.new_to_old_dict= {}
def old_to_new(self, old_state_set):
"""
Return the state of the new machine corresponding to the
set of old machine states represented by |old_state_set|. A new
state will be created if necessary. If any of the old states
are accepting states, the new state will be an accepting state
with the highest priority action from the old states.
"""
key = self.make_key(old_state_set)
new_state = self.old_to_new_dict.get(key, None)
if not new_state:
action = self.highest_priority_action(old_state_set)
new_state = self.new_machine.new_state(action)
self.old_to_new_dict[key] = new_state
self.new_to_old_dict[id(new_state)] = old_state_set
#for old_state in old_state_set.keys():
#new_state.merge_actions(old_state)
return new_state
def highest_priority_action(self, state_set):
best_action = None
best_priority = LOWEST_PRIORITY
for state in state_set.keys():
priority = state.action_priority
if priority > best_priority:
best_action = state.action
best_priority = priority
return best_action
# def old_to_new_set(self, old_state_set):
# """
# Return the new state corresponding to a set of old states as
# a singleton set.
# """
# return {self.old_to_new(old_state_set):1}
def new_to_old(self, new_state):
"""Given a new state, return a set of corresponding old states."""
return self.new_to_old_dict[id(new_state)]
def make_key(self, state_set):
"""
Convert a set of states into a uniquified
sorted tuple suitable for use as a dictionary key.
"""
lst = state_set.keys()
lst.sort()
return tuple(lst)
def dump(self, file):
from Transitions import state_set_str
for new_state in self.new_machine.states:
old_state_set = self.new_to_old_dict[id(new_state)]
file.write(" State %s <-- %s\n" % (
new_state['number'], state_set_str(old_state_set)))
#=======================================================================
#
# Python Lexical Analyser
#
# Exception classes
#
#=======================================================================
import exceptions
class PlexError(exceptions.Exception):
message = ""
class PlexTypeError(PlexError, TypeError):
pass
class PlexValueError(PlexError, ValueError):
pass
class InvalidRegex(PlexError):
pass
class InvalidToken(PlexError):
def __init__(self, token_number, message):
PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
class InvalidScanner(PlexError):
pass
class AmbiguousAction(PlexError):
message = "Two tokens with different actions can match the same string"
def __init__(self):
pass
class UnrecognizedInput(PlexError):
scanner = None
position = None
state_name = None
def __init__(self, scanner, state_name):
self.scanner = scanner
self.position = scanner.position()
self.state_name = state_name
def __str__(self):
return ("'%s', line %d, char %d: Token not recognised in state %s"
% (self.position + (repr(self.state_name),)))
#=======================================================================
#
# Python Lexical Analyser
#
# Lexical Analyser Specification
#
#=======================================================================
import types
import Actions
import DFA
import Errors
import Machines
import Regexps
# debug_flags for Lexicon constructor
DUMP_NFA = 1
DUMP_DFA = 2
class State:
"""
This class is used as part of a Plex.Lexicon specification to
introduce a user-defined state.
Constructor:
State(name, token_specifications)
"""
name = None
tokens = None
def __init__(self, name, tokens):
self.name = name
self.tokens = tokens
class Lexicon:
"""
Lexicon(specification) builds a lexical analyser from the given
|specification|. The specification consists of a list of
specification items. Each specification item may be either:
1) A token definition, which is a tuple:
(pattern, action)
The |pattern| is a regular expression built using the
constructors defined in the Plex module.
The |action| is the action to be performed when this pattern
is recognised (see below).
2) A state definition:
State(name, tokens)
where |name| is a character string naming the state,
and |tokens| is a list of token definitions as
above. The meaning and usage of states is described
below.
Actions
-------
The |action| in a token specification may be one of three things:
1) A function, which is called as follows:
function(scanner, text)
where |scanner| is the relevant Scanner instance, and |text|
is the matched text. If the function returns anything
other than None, that value is returned as the value of the
token. If it returns None, scanning continues as if the IGNORE
action were specified (see below).
2) One of the following special actions:
IGNORE means that the recognised characters will be treated as
white space and ignored. Scanning will continue until
the next non-ignored token is recognised before returning.
TEXT causes the scanned text itself to be returned as the
value of the token.
3) Any other value, which is returned as the value of the token.
States
------
At any given time, the scanner is in one of a number of states.
Associated with each state is a set of possible tokens. When scanning,
only tokens associated with the current state are recognised.
There is a default state, whose name is the empty string. Token
definitions which are not inside any State definition belong to
the default state.
The initial state of the scanner is the default state. The state can
be changed in one of two ways:
1) Using Begin(state_name) as the action of a token.
2) Calling the begin(state_name) method of the Scanner.
To change back to the default state, use '' as the state name.
"""
machine = None # Machine
tables = None # StateTableMachine
def __init__(self, specifications, debug = None, debug_flags = 7, timings = None):
if type(specifications) != types.ListType:
raise Errors.InvalidScanner("Scanner definition is not a list")
if timings:
from Timing import time
total_time = 0.0
time1 = time()
nfa = Machines.Machine()
default_initial_state = nfa.new_initial_state('')
token_number = 1
for spec in specifications:
if isinstance(spec, State):
user_initial_state = nfa.new_initial_state(spec.name)
for token in spec.tokens:
self.add_token_to_machine(
nfa, user_initial_state, token, token_number)
token_number = token_number + 1
elif type(spec) == types.TupleType:
self.add_token_to_machine(
nfa, default_initial_state, spec, token_number)
token_number = token_number + 1
else:
raise Errors.InvalidToken(
token_number,
"Expected a token definition (tuple) or State instance")
if timings:
time2 = time()
total_time = total_time + (time2 - time1)
time3 = time()
if debug and (debug_flags & 1):
debug.write("\n============= NFA ===========\n")
nfa.dump(debug)
dfa = DFA.nfa_to_dfa(nfa, debug = (debug_flags & 3) == 3 and debug)
if timings:
time4 = time()
total_time = total_time + (time4 - time3)
if debug and (debug_flags & 2):
debug.write("\n============= DFA ===========\n")
dfa.dump(debug)
if timings:
timings.write("Constructing NFA : %5.2f\n" % (time2 - time1))
timings.write("Converting to DFA: %5.2f\n" % (time4 - time3))
timings.write("TOTAL : %5.2f\n" % total_time)
self.machine = dfa
def add_token_to_machine(self, machine, initial_state, token_spec, token_number):
try:
(re, action_spec) = self.parse_token_definition(token_spec)
# Disabled this -- matching empty strings can be useful
#if re.nullable:
# raise Errors.InvalidToken(
# token_number, "Pattern can match 0 input symbols")
if isinstance(action_spec, Actions.Action):
action = action_spec
elif callable(action_spec):
action = Actions.Call(action_spec)
else:
action = Actions.Return(action_spec)
final_state = machine.new_state()
re.build_machine(machine, initial_state, final_state,
match_bol = 1, nocase = 0)
final_state.set_action(action, priority = -token_number)
except Errors.PlexError, e:
raise e.__class__("Token number %d: %s" % (token_number, e))
def parse_token_definition(self, token_spec):
if type(token_spec) != types.TupleType:
raise Errors.InvalidToken("Token definition is not a tuple")
if len(token_spec) != 2:
raise Errors.InvalidToken("Wrong number of items in token definition")
pattern, action = token_spec
if not isinstance(pattern, Regexps.RE):
raise Errors.InvalidToken("Pattern is not an RE instance")
return (pattern, action)
def get_initial_state(self, name):
return self.machine.get_initial_state(name)
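# Editorial sketch: a minimal Lexicon specification in the form described by the
# class docstring above, assuming the Plex pattern constructors Str, Any, AnyBut
# and Rep1 are available:
#
#     lexicon = Lexicon([
#         (Rep1(Any("0123456789")), 'number'),     # plain value -> Return action
#         (Rep1(Any(" \t\n")),      IGNORE),
#         (Str("#"),                Begin('comment')),
#         State('comment', [
#             (Rep1(AnyBut("\n")),  IGNORE),
#             (Str("\n"),           Begin('')),    # back to the default state
#         ]),
#     ])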
#=======================================================================
#
# Python Lexical Analyser
#
# Classes for building NFAs and DFAs
#
#=======================================================================
import string
import sys
from sys import maxint
from types import TupleType
from Transitions import TransitionMap
LOWEST_PRIORITY = -sys.maxint
class Machine:
"""A collection of Nodes representing an NFA or DFA."""
states = None # [Node]
next_state_number = 1
initial_states = None # {(name, bol): Node}
def __init__(self):
self.states = []
self.initial_states = {}
def __del__(self):
#print "Destroying", self ###
for state in self.states:
state.destroy()
def new_state(self):
"""Add a new state to the machine and return it."""
s = Node()
n = self.next_state_number
self.next_state_number = n + 1
s.number = n
self.states.append(s)
return s
def new_initial_state(self, name):
state = self.new_state()
self.make_initial_state(name, state)
return state
def make_initial_state(self, name, state):
self.initial_states[name] = state
def get_initial_state(self, name):
return self.initial_states[name]
def dump(self, file):
file.write("Plex.Machine:\n")
if self.initial_states is not None:
file.write(" Initial states:\n")
for (name, state) in self.initial_states.items():
file.write(" '%s': %d\n" % (name, state.number))
for s in self.states:
s.dump(file)
class Node:
"""A state of an NFA or DFA."""
transitions = None # TransitionMap
action = None # Action
action_priority = None # integer
number = 0 # for debug output
epsilon_closure = None # used by nfa_to_dfa()
def __init__(self):
# Preinitialise the list of empty transitions, because
# the nfa-to-dfa algorithm needs it
#self.transitions = {'':[]}
self.transitions = TransitionMap()
self.action_priority = LOWEST_PRIORITY
def destroy(self):
#print "Destroying", self ###
self.transitions = None
self.action = None
self.epsilon_closure = None
def add_transition(self, event, new_state):
self.transitions.add(event, new_state)
def link_to(self, state):
"""Add an epsilon-move from this state to another state."""
self.add_transition('', state)
def set_action(self, action, priority):
"""Make this an accepting state with the given action. If
there is already an action, choose the action with highest
priority."""
if priority > self.action_priority:
self.action = action
self.action_priority = priority
def get_action(self):
return self.action
def get_action_priority(self):
return self.action_priority
# def merge_actions(self, other_state):
# """Merge actions of other state into this state according
# to their priorities."""
# action = other_state.get_action()
# priority = other_state.get_action_priority()
# self.set_action(action, priority)
def is_accepting(self):
return self.action is not None
def __str__(self):
return "State %d" % self.number
def dump(self, file):
import string
# Header
file.write(" State %d:\n" % self.number)
# Transitions
# self.dump_transitions(file)
self.transitions.dump(file)
# Action
action = self.action
priority = self.action_priority
if action is not None:
file.write(" %s [priority %d]\n" % (action, priority))
class FastMachine:
"""
FastMachine is a deterministic machine represented in a way that
allows fast scanning.
"""
initial_states = None # {state_name:state}
states = None # [state]
# where state = {event:state, 'else':state, 'action':Action}
next_number = 1 # for debugging
new_state_template = {
'':None, 'bol':None, 'eol':None, 'eof':None, 'else':None
}
def __init__(self, old_machine = None):
self.initial_states = initial_states = {}
self.states = []
if old_machine:
self.old_to_new = old_to_new = {}
for old_state in old_machine.states:
new_state = self.new_state()
old_to_new[old_state] = new_state
for name, old_state in old_machine.initial_states.items():
initial_states[name] = old_to_new[old_state]
for old_state in old_machine.states:
new_state = old_to_new[old_state]
for event, old_state_set in old_state.transitions.items():
if old_state_set:
new_state[event] = old_to_new[old_state_set.keys()[0]]
else:
new_state[event] = None
new_state['action'] = old_state.action
def __del__(self):
for state in self.states:
state.clear()
def new_state(self, action = None):
number = self.next_number
self.next_number = number + 1
result = self.new_state_template.copy()
result['number'] = number
result['action'] = action
self.states.append(result)
return result
def make_initial_state(self, name, state):
self.initial_states[name] = state
def add_transitions(self, state, event, new_state):
if type(event) == TupleType:
code0, code1 = event
if code0 == -maxint:
state['else'] = new_state
elif code1 != maxint:
while code0 < code1:
state[chr(code0)] = new_state
code0 = code0 + 1
else:
state[event] = new_state
def get_initial_state(self, name):
return self.initial_states[name]
def dump(self, file):
file.write("Plex.FastMachine:\n")
file.write(" Initial states:\n")
for name, state in self.initial_states.items():
file.write(" %s: %s\n" % (repr(name), state['number']))
for state in self.states:
self.dump_state(state, file)
def dump_state(self, state, file):
import string
# Header
file.write(" State %d:\n" % state['number'])
# Transitions
self.dump_transitions(state, file)
# Action
action = state['action']
if action is not None:
file.write(" %s\n" % action)
def dump_transitions(self, state, file):
chars_leading_to_state = {}
special_to_state = {}
for (c, s) in state.items():
if len(c) == 1:
chars = chars_leading_to_state.get(id(s), None)
if chars is None:
chars = []
chars_leading_to_state[id(s)] = chars
chars.append(c)
elif len(c) <= 4:
special_to_state[c] = s
ranges_to_state = {}
for state in self.states:
char_list = chars_leading_to_state.get(id(state), None)
if char_list:
ranges = self.chars_to_ranges(char_list)
ranges_to_state[ranges] = state
ranges_list = ranges_to_state.keys()
ranges_list.sort()
for ranges in ranges_list:
key = self.ranges_to_string(ranges)
state = ranges_to_state[ranges]
file.write(" %s --> State %d\n" % (key, state['number']))
for key in ('bol', 'eol', 'eof', 'else'):
state = special_to_state.get(key, None)
if state:
file.write(" %s --> State %d\n" % (key, state['number']))
def chars_to_ranges(self, char_list):
char_list.sort()
i = 0
n = len(char_list)
result = []
while i < n:
c1 = ord(char_list[i])
c2 = c1
i = i + 1
while i < n and ord(char_list[i]) == c2 + 1:
i = i + 1
c2 = c2 + 1
result.append((chr(c1), chr(c2)))
return tuple(result)
def ranges_to_string(self, range_list):
return string.join(map(self.range_to_string, range_list), ",")
def range_to_string(self, (c1, c2)):
if c1 == c2:
return repr(c1)
else:
return "%s..%s" % (repr(c1), repr(c2))
##
## (Superseded by Machines.FastMachine)
##
## class StateTableMachine:
## """
## StateTableMachine is an alternative representation of a Machine
## that can be run more efficiently.
## """
## initial_states = None # {state_name:state_index}
## states = None # [([state] indexed by char code, Action)]
## special_map = {'bol':256, 'eol':257, 'eof':258}
## def __init__(self, m):
## """
## Initialise StateTableMachine from Machine |m|.
## """
## initial_states = self.initial_states = {}
## states = self.states = [None]
## old_to_new = {}
## i = 1
## for old_state in m.states:
## new_state = ([0] * 259, old_state.get_action())
## states.append(new_state)
## old_to_new[old_state] = i # new_state
## i = i + 1
## for name, old_state in m.initial_states.items():
## initial_states[name] = old_to_new[old_state]
## for old_state in m.states:
## new_state_index = old_to_new[old_state]
## new_table = states[new_state_index][0]
## transitions = old_state.transitions
## for c, old_targets in transitions.items():
## if old_targets:
## old_target = old_targets[0]
## new_target_index = old_to_new[old_target]
## if len(c) == 1:
## a = ord(c)
## else:
## a = self.special_map[c]
## new_table[a] = states[new_target_index]
## def dump(self, f):
## f.write("Plex.StateTableMachine:\n")
## f.write(" Initial states:\n")
## for name, index in self.initial_states.items():
## f.write(" %s: State %d\n" % (
## repr(name), id(self.states[index])))
## for i in xrange(1, len(self.states)):
## table, action = self.states[i]
## f.write(" State %d:" % i)
## if action:
## f.write("%s" % action)
## f.write("\n")
## f.write(" %s\n" % map(id,table))
#=======================================================================
#
# Python Lexical Analyser
#
# Regular Expressions
#
#=======================================================================
import array
import string
import types
from sys import maxint
import Errors
#
# Constants
#
BOL = 'bol'
EOL = 'eol'
EOF = 'eof'
nl_code = ord('\n')
#
# Helper functions
#
def chars_to_ranges(s):
"""
Return a list of character codes consisting of pairs
[code1a, code1b, code2a, code2b,...] which cover all
the characters in |s|.
"""
char_list = list(s)
char_list.sort()
i = 0
n = len(char_list)
result = []
while i < n:
code1 = ord(char_list[i])
code2 = code1 + 1
i = i + 1
while i < n and code2 >= ord(char_list[i]):
code2 = code2 + 1
i = i + 1
result.append(code1)
result.append(code2)
return result
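# Editorial worked example: chars_to_ranges("abcx") returns [97, 100, 120, 121],
# i.e. the half-open code ranges 97 <= c < 100 ('a'..'c') and 120 <= c < 121 ('x').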
def uppercase_range(code1, code2):
"""
If the range of characters from code1 to code2-1 includes any
lower case letters, return the corresponding upper case range.
"""
code3 = max(code1, ord('a'))
code4 = min(code2, ord('z') + 1)
if code3 < code4:
d = ord('A') - ord('a')
return (code3 + d, code4 + d)
else:
return None
def lowercase_range(code1, code2):
"""
If the range of characters from code1 to code2-1 includes any
upper case letters, return the corresponding lower case range.
"""
code3 = max(code1, ord('A'))
code4 = min(code2, ord('Z') + 1)
if code3 < code4:
d = ord('a') - ord('A')
return (code3 + d, code4 + d)
else:
return None
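# Editorial worked example: uppercase_range(ord('a'), ord('d')) returns (65, 68),
# the 'A'..'C' counterpart of 'a'..'c'; a range containing no letters of the
# relevant case yields None.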
def CodeRanges(code_list):
"""
Given a list of codes as returned by chars_to_ranges, return
an RE which will match a character in any of the ranges.
"""
re_list = []
for i in xrange(0, len(code_list), 2):
re_list.append(CodeRange(code_list[i], code_list[i + 1]))
return apply(Alt, tuple(re_list))
def CodeRange(code1, code2):
"""
CodeRange(code1, code2) is an RE which matches any character
with a code |c| in the range |code1| <= |c| < |code2|.
"""
if code1 <= nl_code < code2:
return Alt(RawCodeRange(code1, nl_code),
RawNewline,
RawCodeRange(nl_code + 1, code2))
else:
return RawCodeRange(code1, code2)
#
# Abstract classes
#
class RE:
"""RE is the base class for regular expression constructors.
The following operators are defined on REs:
re1 + re2 is an RE which matches |re1| followed by |re2|
re1 | re2 is an RE which matches either |re1| or |re2|
"""
nullable = 1 # True if this RE can match 0 input symbols
match_nl = 1 # True if this RE can match a string ending with '\n'
str = None # Set to a string to override the class's __str__ result
def build_machine(self, machine, initial_state, final_state,
match_bol, nocase):
"""
This method should add states to |machine| to implement this
RE, starting at |initial_state| and ending at |final_state|.
If |match_bol| is true, the RE must be able to match at the
beginning of a line. If nocase is true, upper and lower case
letters should be treated as equivalent.
"""
raise NotImplementedError("%s.build_machine not implemented" %
self.__class__.__name__)
def build_opt(self, m, initial_state, c):
"""
Given a state |s| of machine |m|, return a new state
reachable from |s| on character |c| or epsilon.
"""
s = m.new_state()
initial_state.link_to(s)
initial_state.add_transition(c, s)
return s
def __add__(self, other):
return Seq(self, other)
def __or__(self, other):
return Alt(self, other)
def __str__(self):
if self.str:
return self.str
else:
return self.calc_str()
def check_re(self, num, value):
if not isinstance(value, RE):
self.wrong_type(num, value, "Plex.RE instance")
def check_string(self, num, value):
if type(value) != type(''):
self.wrong_type(num, value, "string")
def check_char(self, num, value):
self.check_string(num, value)
if len(value) != 1:
raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s. "
"Expected a string of length 1, got: %s" % (
num, self.__class__.__name__, repr(value)))
def wrong_type(self, num, value, expected):
if type(value) == types.InstanceType:
got = "%s.%s instance" % (
value.__class__.__module__, value.__class__.__name__)
else:
got = type(value).__name__
raise Errors.PlexTypeError("Invalid type for argument %d of Plex.%s "
"(expected %s, got %s" % (
num, self.__class__.__name__, expected, got))
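# Editor's illustrative sketch (not part of the original module): the
# operator overloads on RE compose patterns with + (sequence) and |
# (alternation) instead of calling Seq and Alt directly. Char, Seq and Alt
# are defined further down in this module and resolve when called.
def _example_re_operators():
    ab = Char('a') + Char('b')
    a_or_b = Char('a') | Char('b')
    assert isinstance(ab, Seq)
    assert isinstance(a_or_b, Alt)
    assert str(ab) == "Seq(Char('a'),Char('b'))"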
#
# Primitive RE constructors
# -------------------------
#
# These are the basic REs from which all others are built.
#
## class Char(RE):
## """
## Char(c) is an RE which matches the character |c|.
## """
## nullable = 0
## def __init__(self, char):
## self.char = char
## self.match_nl = char == '\n'
## def build_machine(self, m, initial_state, final_state, match_bol, nocase):
## c = self.char
## if match_bol and c != BOL:
## s1 = self.build_opt(m, initial_state, BOL)
## else:
## s1 = initial_state
## if c == '\n' or c == EOF:
## s1 = self.build_opt(m, s1, EOL)
## if len(c) == 1:
## code = ord(self.char)
## s1.add_transition((code, code+1), final_state)
## if nocase and is_letter_code(code):
## code2 = other_case_code(code)
## s1.add_transition((code2, code2+1), final_state)
## else:
## s1.add_transition(c, final_state)
## def calc_str(self):
## return "Char(%s)" % repr(self.char)
def Char(c):
"""
Char(c) is an RE which matches the character |c|.
"""
if len(c) == 1:
result = CodeRange(ord(c), ord(c) + 1)
else:
result = SpecialSymbol(c)
result.str = "Char(%s)" % repr(c)
return result
class RawCodeRange(RE):
"""
RawCodeRange(code1, code2) is a low-level RE which matches any character
with a code |c| in the range |code1| <= |c| < |code2|, where the range
does not include newline. For internal use only.
"""
nullable = 0
match_nl = 0
range = None # (code, code)
uppercase_range = None # (code, code) or None
lowercase_range = None # (code, code) or None
def __init__(self, code1, code2):
self.range = (code1, code2)
self.uppercase_range = uppercase_range(code1, code2)
self.lowercase_range = lowercase_range(code1, code2)
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
initial_state.add_transition(self.range, final_state)
if nocase:
if self.uppercase_range:
initial_state.add_transition(self.uppercase_range, final_state)
if self.lowercase_range:
initial_state.add_transition(self.lowercase_range, final_state)
def calc_str(self):
return "CodeRange(%d,%d)" % (self.code1, self.code2)
class _RawNewline(RE):
"""
RawNewline is a low-level RE which matches a newline character.
For internal use only.
"""
nullable = 0
match_nl = 1
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
s = self.build_opt(m, initial_state, EOL)
s.add_transition((nl_code, nl_code + 1), final_state)
RawNewline = _RawNewline()
class SpecialSymbol(RE):
"""
SpecialSymbol(sym) is an RE which matches the special input
symbol |sym|, which is one of BOL, EOL or EOF.
"""
nullable = 0
match_nl = 0
sym = None
def __init__(self, sym):
self.sym = sym
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
# Sequences 'bol bol' and 'bol eof' are impossible, so only need
# to allow for bol if sym is eol
if match_bol and self.sym == EOL:
initial_state = self.build_opt(m, initial_state, BOL)
initial_state.add_transition(self.sym, final_state)
class Seq(RE):
"""Seq(re1, re2, re3...) is an RE which matches |re1| followed by
|re2| followed by |re3|..."""
def __init__(self, *re_list):
nullable = 1
for i in xrange(len(re_list)):
re = re_list[i]
self.check_re(i, re)
nullable = nullable and re.nullable
self.re_list = re_list
self.nullable = nullable
i = len(re_list)
match_nl = 0
while i:
i = i - 1
re = re_list[i]
if re.match_nl:
match_nl = 1
break
if not re.nullable:
break
self.match_nl = match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
re_list = self.re_list
if len(re_list) == 0:
initial_state.link_to(final_state)
else:
s1 = initial_state
n = len(re_list)
for i in xrange(n):
if i < n - 1:
s2 = m.new_state()
else:
s2 = final_state
re = re_list[i]
re.build_machine(m, s1, s2, match_bol, nocase)
s1 = s2
match_bol = re.match_nl or (match_bol and re.nullable)
def calc_str(self):
return "Seq(%s)" % string.join(map(str, self.re_list), ",")
class Alt(RE):
"""Alt(re1, re2, re3...) is an RE which matches either |re1| or
|re2| or |re3|..."""
def __init__(self, *re_list):
self.re_list = re_list
nullable = 0
match_nl = 0
nullable_res = []
non_nullable_res = []
i = 1
for re in re_list:
self.check_re(i, re)
if re.nullable:
nullable_res.append(re)
nullable = 1
else:
non_nullable_res.append(re)
if re.match_nl:
match_nl = 1
i = i + 1
self.nullable_res = nullable_res
self.non_nullable_res = non_nullable_res
self.nullable = nullable
self.match_nl = match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
for re in self.nullable_res:
re.build_machine(m, initial_state, final_state, match_bol, nocase)
if self.non_nullable_res:
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
for re in self.non_nullable_res:
re.build_machine(m, initial_state, final_state, 0, nocase)
def calc_str(self):
return "Alt(%s)" % string.join(map(str, self.re_list), ",")
class Rep1(RE):
"""Rep1(re) is an RE which matches one or more repetitions of |re|."""
def __init__(self, re):
self.check_re(1, re)
self.re = re
self.nullable = re.nullable
self.match_nl = re.match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
s1 = m.new_state()
s2 = m.new_state()
initial_state.link_to(s1)
self.re.build_machine(m, s1, s2, match_bol or self.re.match_nl, nocase)
s2.link_to(s1)
s2.link_to(final_state)
def calc_str(self):
return "Rep1(%s)" % self.re
class SwitchCase(RE):
"""
SwitchCase(re, nocase) is an RE which matches the same strings as |re|,
but treating upper and lower case letters according to |nocase|. If
|nocase| is true, case is ignored, otherwise it is not.
"""
re = None
nocase = None
def __init__(self, re, nocase):
self.re = re
self.nocase = nocase
self.nullable = re.nullable
self.match_nl = re.match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
self.re.build_machine(m, initial_state, final_state, match_bol,
self.nocase)
def calc_str(self):
if self.nocase:
name = "NoCase"
else:
name = "Case"
return "%s(%s)" % (name, self.re)
#
# Composite RE constructors
# -------------------------
#
# These REs are defined in terms of the primitive REs.
#
Empty = Seq()
Empty.__doc__ = \
"""
Empty is an RE which matches the empty string.
"""
Empty.str = "Empty"
def Str1(s):
"""
Str1(s) is an RE which matches the literal string |s|.
"""
result = apply(Seq, tuple(map(Char, s)))
result.str = "Str(%s)" % repr(s)
return result
def Str(*strs):
"""
Str(s) is an RE which matches the literal string |s|.
Str(s1, s2, s3, ...) is an RE which matches any of |s1| or |s2| or |s3|...
"""
if len(strs) == 1:
return Str1(strs[0])
else:
result = apply(Alt, tuple(map(Str1, strs)))
result.str = "Str(%s)" % string.join(map(repr, strs), ",")
return result
def Any(s):
"""
Any(s) is an RE which matches any character in the string |s|.
"""
#result = apply(Alt, tuple(map(Char, s)))
result = CodeRanges(chars_to_ranges(s))
result.str = "Any(%s)" % repr(s)
return result
def AnyBut(s):
"""
AnyBut(s) is an RE which matches any character (including
newline) which is not in the string |s|.
"""
ranges = chars_to_ranges(s)
ranges.insert(0, -maxint)
ranges.append(maxint)
result = CodeRanges(ranges)
result.str = "AnyBut(%s)" % repr(s)
return result
AnyChar = AnyBut("")
AnyChar.__doc__ = \
"""
AnyChar is an RE which matches any single character (including a newline).
"""
AnyChar.str = "AnyChar"
def Range(s1, s2 = None):
"""
Range(c1, c2) is an RE which matches any single character in the range
|c1| to |c2| inclusive.
Range(s) where |s| is a string of even length is an RE which matches
any single character in the ranges |s[0]| to |s[1]|, |s[2]| to |s[3]|,...
"""
if s2:
result = CodeRange(ord(s1), ord(s2) + 1)
result.str = "Range(%s,%s)" % (s1, s2)
else:
ranges = []
for i in range(0, len(s1), 2):
ranges.append(CodeRange(ord(s1[i]), ord(s1[i+1]) + 1))
result = apply(Alt, tuple(ranges))
result.str = "Range(%s)" % repr(s1)
return result
def Opt(re):
"""
Opt(re) is an RE which matches either |re| or the empty string.
"""
result = Alt(re, Empty)
result.str = "Opt(%s)" % re
return result
def Rep(re):
"""
Rep(re) is an RE which matches zero or more repetitions of |re|.
"""
result = Opt(Rep1(re))
result.str = "Rep(%s)" % re
return result
def NoCase(re):
"""
NoCase(re) is an RE which matches the same strings as |re|, but treating
upper and lower case letters as equivalent.
"""
return SwitchCase(re, nocase = 1)
def Case(re):
"""
Case(re) is an RE which matches the same strings as |re|, but treating
upper and lower case letters as distinct, i.e. it cancels the effect
of any enclosing NoCase().
"""
return SwitchCase(re, nocase = 0)
#
# RE Constants
#
Bol = Char(BOL)
Bol.__doc__ = \
"""
Bol is an RE which matches the beginning of a line.
"""
Bol.str = "Bol"
Eol = Char(EOL)
Eol.__doc__ = \
"""
Eol is an RE which matches the end of a line.
"""
Eol.str = "Eol"
Eof = Char(EOF)
Eof.__doc__ = \
"""
Eof is an RE which matches the end of the file.
"""
Eof.str = "Eof"
#=======================================================================
#
# Python Lexical Analyser
#
#
# Scanning an input stream
#
#=======================================================================
import Errors
from Regexps import BOL, EOL, EOF
class Scanner:
"""
A Scanner is used to read tokens from a stream of characters
using the token set specified by a Plex.Lexicon.
Constructor:
Scanner(lexicon, stream, name = '')
See the docstring of the __init__ method for details.
Methods:
See the docstrings of the individual methods for more
information.
read() --> (value, text)
Reads the next lexical token from the stream.
position() --> (name, line, col)
Returns the position of the last token read using the
read() method.
begin(state_name)
Causes scanner to change state.
produce(value [, text])
Causes return of a token value to the caller of the
Scanner.
"""
lexicon = None # Lexicon
stream = None # file-like object
name = ''
buffer = ''
buf_start_pos = 0 # position in input of start of buffer
next_pos = 0 # position in input of next char to read
cur_pos = 0 # position in input of current char
cur_line = 1 # line number of current char
cur_line_start = 0 # position in input of start of current line
start_pos = 0 # position in input of start of token
start_line = 0 # line number of start of token
start_col = 0 # position in line of start of token
text = None # text of last token read
initial_state = None # Node
state_name = '' # Name of initial state
queue = None # list of tokens to be returned
trace = 0
def __init__(self, lexicon, stream, name = ''):
"""
Scanner(lexicon, stream, name = '')
|lexicon| is a Plex.Lexicon instance specifying the lexical tokens
to be recognised.
|stream| can be a file object or anything which implements a
compatible read() method.
|name| is optional, and may be the name of the file being
scanned or any other identifying string.
"""
self.lexicon = lexicon
self.stream = stream
self.name = name
self.queue = []
self.initial_state = None
self.begin('')
self.next_pos = 0
self.cur_pos = 0
self.cur_line_start = 0
self.cur_char = BOL
self.input_state = 1
def read(self):
"""
Read the next lexical token from the stream and return a
tuple (value, text), where |value| is the value associated with
the token as specified by the Lexicon, and |text| is the actual
string read from the stream. Returns (None, '') on end of file.
"""
queue = self.queue
while not queue:
self.text, action = self.scan_a_token()
if action is None:
self.produce(None)
self.eof()
else:
value = action.perform(self, self.text)
if value is not None:
self.produce(value)
result = queue[0]
del queue[0]
return result
def scan_a_token(self):
"""
Read the next input sequence recognised by the machine
and return (text, action). Returns ('', None) on end of
file.
"""
self.start_pos = self.cur_pos
self.start_line = self.cur_line
self.start_col = self.cur_pos - self.cur_line_start
# if self.trace:
# action = self.run_machine()
# else:
# action = self.run_machine_inlined()
action = self.run_machine_inlined()
if action:
if self.trace:
print("Scanner: read: Performing %s %d:%d" % (
action, self.start_pos, self.cur_pos))
base = self.buf_start_pos
text = self.buffer[self.start_pos - base : self.cur_pos - base]
return (text, action)
else:
if self.cur_pos == self.start_pos:
if self.cur_char == EOL:
self.next_char()
if not self.cur_char or self.cur_char == EOF:
return ('', None)
raise Errors.UnrecognizedInput(self, self.state_name)
def run_machine(self):
"""
Run the machine until no more transitions are possible.
"""
self.state = self.initial_state
self.backup_state = None
while self.transition():
pass
return self.back_up()
def run_machine_inlined(self):
"""
Inlined version of run_machine for speed.
"""
state = self.initial_state
cur_pos = self.cur_pos
cur_line = self.cur_line
cur_line_start = self.cur_line_start
cur_char = self.cur_char
input_state = self.input_state
next_pos = self.next_pos
buffer = self.buffer
buf_start_pos = self.buf_start_pos
buf_len = len(buffer)
backup_state = None
trace = self.trace
while 1:
if trace: #TRACE#
print("State %d, %d/%d:%s -->" % ( #TRACE#
state['number'], input_state, cur_pos, repr(cur_char))) #TRACE#
# Begin inlined self.save_for_backup()
#action = state.action #@slow
action = state['action'] #@fast
if action:
backup_state = (
action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos)
# End inlined self.save_for_backup()
c = cur_char
#new_state = state.new_state(c) #@slow
new_state = state.get(c, -1) #@fast
if new_state == -1: #@fast
new_state = c and state.get('else') #@fast
if new_state:
if trace: #TRACE#
print("State %d" % new_state['number']) #TRACE#
state = new_state
# Begin inlined: self.next_char()
if input_state == 1:
cur_pos = next_pos
# Begin inlined: c = self.read_char()
buf_index = next_pos - buf_start_pos
if buf_index < buf_len:
c = buffer[buf_index]
next_pos = next_pos + 1
else:
discard = self.start_pos - buf_start_pos
data = self.stream.read(0x1000)
buffer = self.buffer[discard:] + data
self.buffer = buffer
buf_start_pos = buf_start_pos + discard
self.buf_start_pos = buf_start_pos
buf_len = len(buffer)
buf_index = buf_index - discard
if data:
c = buffer[buf_index]
next_pos = next_pos + 1
else:
c = ''
# End inlined: c = self.read_char()
if c == '\n':
cur_char = EOL
input_state = 2
elif not c:
cur_char = EOL
input_state = 4
else:
cur_char = c
elif input_state == 2:
cur_char = '\n'
input_state = 3
elif input_state == 3:
cur_line = cur_line + 1
cur_line_start = cur_pos = next_pos
cur_char = BOL
input_state = 1
elif input_state == 4:
cur_char = EOF
input_state = 5
else: # input_state = 5
cur_char = ''
# End inlined self.next_char()
else: # not new_state
if trace: #TRACE#
print("blocked") #TRACE#
# Begin inlined: action = self.back_up()
if backup_state:
(action, cur_pos, cur_line, cur_line_start,
cur_char, input_state, next_pos) = backup_state
else:
action = None
break # while 1
# End inlined: action = self.back_up()
self.cur_pos = cur_pos
self.cur_line = cur_line
self.cur_line_start = cur_line_start
self.cur_char = cur_char
self.input_state = input_state
self.next_pos = next_pos
if trace: #TRACE#
if action: #TRACE#
print("Doing " + action) #TRACE#
return action
# def transition(self):
# self.save_for_backup()
# c = self.cur_char
# new_state = self.state.new_state(c)
# if new_state:
# if self.trace:
# print "Scanner: read: State %d: %s --> State %d" % (
# self.state.number, repr(c), new_state.number)
# self.state = new_state
# self.next_char()
# return 1
# else:
# if self.trace:
# print "Scanner: read: State %d: %s --> blocked" % (
# self.state.number, repr(c))
# return 0
# def save_for_backup(self):
# action = self.state.get_action()
# if action:
# if self.trace:
# print "Scanner: read: Saving backup point at", self.cur_pos
# self.backup_state = (
# action, self.cur_pos, self.cur_line, self.cur_line_start,
# self.cur_char, self.input_state, self.next_pos)
# def back_up(self):
# backup_state = self.backup_state
# if backup_state:
# (action, self.cur_pos, self.cur_line, self.cur_line_start,
# self.cur_char, self.input_state, self.next_pos) = backup_state
# if self.trace:
# print "Scanner: read: Backing up to", self.cur_pos
# return action
# else:
# return None
def next_char(self):
input_state = self.input_state
if self.trace:
print("Scanner: next: %s [%d] %d" % (" "*20, input_state, self.cur_pos))
if input_state == 1:
self.cur_pos = self.next_pos
c = self.read_char()
if c == '\n':
self.cur_char = EOL
self.input_state = 2
elif not c:
self.cur_char = EOL
self.input_state = 4
else:
self.cur_char = c
elif input_state == 2:
self.cur_char = '\n'
self.input_state = 3
elif input_state == 3:
self.cur_line = self.cur_line + 1
self.cur_line_start = self.cur_pos = self.next_pos
self.cur_char = BOL
self.input_state = 1
elif input_state == 4:
self.cur_char = EOF
self.input_state = 5
else: # input_state = 5
self.cur_char = ''
if self.trace:
print("--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char)))
# def read_char(self):
# """
# Get the next input character, filling the buffer if necessary.
# Returns '' at end of file.
# """
# next_pos = self.next_pos
# buf_index = next_pos - self.buf_start_pos
# if buf_index == len(self.buffer):
# discard = self.start_pos - self.buf_start_pos
# data = self.stream.read(0x1000)
# self.buffer = self.buffer[discard:] + data
# self.buf_start_pos = self.buf_start_pos + discard
# buf_index = buf_index - discard
# if not data:
# return ''
# c = self.buffer[buf_index]
# self.next_pos = next_pos + 1
# return c
def position(self):
"""
Return a tuple (name, line, col) representing the location of
the last token read using the read() method. |name| is the
name that was provided to the Scanner constructor; |line|
is the line number in the stream (1-based); |col| is the
position within the line of the first character of the token
(0-based).
"""
return (self.name, self.start_line, self.start_col)
def begin(self, state_name):
"""Set the current state of the scanner to the named state."""
self.initial_state = (
self.lexicon.get_initial_state(state_name))
self.state_name = state_name
def produce(self, value, text = None):
"""
Called from an action procedure, causes |value| to be returned
as the token value from read(). If |text| is supplied, it is
returned in place of the scanned text.
produce() can be called more than once during a single call to an action
procedure, in which case the tokens are queued up and returned one
at a time by subsequent calls to read(), until the queue is empty,
whereupon scanning resumes.
"""
if text is None:
text = self.text
self.queue.append((value, text))
def eof(self):
"""
Override this method if you want something to be done at
end of file.
"""
# For backward compatibility:
setattr(Scanner, "yield", Scanner.produce)
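# Editor's illustrative sketch (not part of the original module): eof() is a
# hook for subclasses, and produce() may be called any number of times from
# an action or from eof(); read() then drains the queued tokens one at a time.
class _ExampleScanner(Scanner):
    def eof(self):
        # Queue an extra token at end of file; it is returned by read()
        # after the terminating (None, '') token.
        self.produce("eof", "")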
#
# Get time in platform-dependent way
#
import os
from sys import platform, exit, stderr
if platform == 'mac':
import MacOS
def time():
return MacOS.GetTicks() / 60.0
timekind = "real"
elif hasattr(os, 'times'):
def time():
t = os.times()
return t[0] + t[1]
timekind = "cpu"
else:
stderr.write(
"Don't know how to get time on platform %s\n" % repr(platform))
exit(1)
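# Editor's illustrative sketch (not part of the original module): time()
# measures CPU time where os.times is available and real time on classic
# MacOS; timekind records which, so callers can label their measurements.
def _example_timing():
    t0 = time()
    t1 = time()
    return "%s time elapsed: %.6f" % (timekind, t1 - t0)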
#=======================================================================
#
# Python Lexical Analyser
#
# Traditional Regular Expression Syntax
#
#=======================================================================
from Regexps import *
from Errors import PlexError
class RegexpSyntaxError(PlexError):
pass
def re(s):
"""
Convert traditional string representation of regular expression |s|
into Plex representation.
"""
return REParser(s).parse_re()
class REParser:
def __init__(self, s):
self.s = s
self.i = -1
self.end = 0
self.next()
def parse_re(self):
re = self.parse_alt()
if not self.end:
self.error("Unexpected %s" % repr(self.c))
return re
def parse_alt(self):
"""Parse a set of alternative regexps."""
re = self.parse_seq()
if self.c == '|':
re_list = [re]
while self.c == '|':
self.next()
re_list.append(self.parse_seq())
re = apply(Alt, tuple(re_list))
return re
def parse_seq(self):
"""Parse a sequence of regexps."""
re_list = []
while not self.end and not self.c in "|)":
re_list.append(self.parse_mod())
return apply(Seq, tuple(re_list))
def parse_mod(self):
"""Parse a primitive regexp followed by *, +, ? modifiers."""
re = self.parse_prim()
while not self.end and self.c in "*+?":
if self.c == '*':
re = Rep(re)
elif self.c == '+':
re = Rep1(re)
else: # self.c == '?'
re = Opt(re)
self.next()
return re
def parse_prim(self):
"""Parse a primitive regexp."""
c = self.get()
if c == '.':
re = AnyBut("\n")
elif c == '^':
re = Bol
elif c == '$':
re = Eol
elif c == '(':
re = self.parse_alt()
self.expect(')')
elif c == '[':
re = self.parse_charset()
self.expect(']')
else:
if c == '\\':
c = self.get()
re = Char(c)
return re
def parse_charset(self):
"""Parse a charset. Does not include the surrounding []."""
char_list = []
invert = 0
if self.c == '^':
invert = 1
self.next()
if self.c == ']':
char_list.append(']')
self.next()
while not self.end and self.c != ']':
c1 = self.get()
if self.c == '-' and self.lookahead(1) != ']':
self.next()
c2 = self.get()
for a in xrange(ord(c1), ord(c2) + 1):
char_list.append(chr(a))
else:
char_list.append(c1)
chars = string.join(char_list, "")
if invert:
return AnyBut(chars)
else:
return Any(chars)
def next(self):
"""Advance to the next char."""
s = self.s
i = self.i = self.i + 1
if i < len(s):
self.c = s[i]
else:
self.c = ''
self.end = 1
def get(self):
if self.end:
self.error("Premature end of string")
c = self.c
self.next()
return c
def lookahead(self, n):
"""Look ahead n chars."""
j = self.i + n
if j < len(self.s):
return self.s[j]
else:
return ''
def expect(self, c):
"""
Expect to find character |c| at current position.
Raises an exception otherwise.
"""
if self.c == c:
self.next()
else:
self.error("Missing %s" % repr(c))
def error(self, mess):
"""Raise exception to signal syntax error in regexp."""
raise RegexpSyntaxError("Syntax error in regexp %s at position %d: %s" % (
repr(self.s), self.i, mess))
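# Editor's illustrative sketch (not part of the original module): re() turns
# a traditional regexp string into the constructors from Regexps.
def _example_traditional_re():
    r = re("a[0-9]+")
    assert isinstance(r, Seq)     # Char('a') followed by Rep1 of a digit range
    assert not r.nullable
    assert re("x?").nullable      # '?' wraps the char in Opt, so '' matches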
#
# Plex - Transition Maps
#
# This version represents state sets directly as dicts
# for speed.
#
from copy import copy
import string
from sys import maxint
from types import TupleType
class TransitionMap:
"""
A TransitionMap maps an input event to a set of states.
An input event is one of: a range of character codes,
the empty string (representing an epsilon move), or one
of the special symbols BOL, EOL, EOF.
For characters, this implementation compactly represents
the map by means of a list:
[code_0, states_0, code_1, states_1, code_2, states_2,
..., code_n-1, states_n-1, code_n]
where |code_i| is a character code, and |states_i| is a
set of states corresponding to characters with codes |c|
in the range |code_i| <= |c| <= |code_i+1|.
The following invariants hold:
n >= 1
code_0 == -maxint
code_n == maxint
code_i < code_i+1 for i in 0..n-1
states_0 == states_n-1
Mappings for the special events '', BOL, EOL, EOF are
kept separately in a dictionary.
"""
map = None # The list of codes and states
special = None # Mapping for special events
def __init__(self, map = None, special = None):
if not map:
map = [-maxint, {}, maxint]
if not special:
special = {}
self.map = map
self.special = special
#self.check() ###
def add(self, event, new_state,
TupleType = TupleType):
"""
Add transition to |new_state| on |event|.
"""
if type(event) == TupleType:
code0, code1 = event
i = self.split(code0)
j = self.split(code1)
map = self.map
while i < j:
map[i + 1][new_state] = 1
i = i + 2
else:
self.get_special(event)[new_state] = 1
def add_set(self, event, new_set,
TupleType = TupleType):
"""
Add transitions to the states in |new_set| on |event|.
"""
if type(event) == TupleType:
code0, code1 = event
i = self.split(code0)
j = self.split(code1)
map = self.map
while i < j:
map[i + 1].update(new_set)
i = i + 2
else:
self.get_special(event).update(new_set)
def get_epsilon(self,
none = None):
"""
Return the mapping for epsilon, or None.
"""
return self.special.get('', none)
def items(self,
len = len):
"""
Return the mapping as a list of ((code1, code2), state_set) and
(special_event, state_set) pairs.
"""
result = []
map = self.map
else_set = map[1]
i = 0
n = len(map) - 1
code0 = map[0]
while i < n:
set = map[i + 1]
code1 = map[i + 2]
if set or else_set:
result.append(((code0, code1), set))
code0 = code1
i = i + 2
for event, set in self.special.items():
if set:
result.append((event, set))
return result
# ------------------- Private methods --------------------
def split(self, code,
len = len, maxint = maxint):
"""
Search the list for the position of the split point for |code|,
inserting a new split point if necessary. Returns index |i| such
that |code| == |map[i]|.
"""
# We use a funky variation on binary search.
map = self.map
hi = len(map) - 1
# Special case: code == map[-1]
if code == maxint:
return hi
# General case
lo = 0
# loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2
while hi - lo >= 4:
# Find midpoint truncated to even index
mid = ((lo + hi) / 2) & ~1
if code < map[mid]:
hi = mid
else:
lo = mid
# map[lo] <= code < map[hi] and hi - lo == 2
if map[lo] == code:
return lo
else:
map[hi:hi] = [code, map[hi - 1].copy()]
#self.check() ###
return hi
def get_special(self, event):
"""
Get state set for special event, adding a new entry if necessary.
"""
special = self.special
set = special.get(event, None)
if not set:
set = {}
special[event] = set
return set
# --------------------- Conversion methods -----------------------
def __str__(self):
map_strs = []
map = self.map
n = len(map)
i = 0
while i < n:
code = map[i]
if code == -maxint:
code_str = "-inf"
elif code == maxint:
code_str = "inf"
else:
code_str = str(code)
map_strs.append(code_str)
i = i + 1
if i < n:
map_strs.append(state_set_str(map[i]))
i = i + 1
special_strs = {}
for event, set in self.special.items():
special_strs[event] = state_set_str(set)
return "[%s]+%s" % (
string.join(map_strs, ","),
special_strs
)
# --------------------- Debugging methods -----------------------
def check(self):
"""Check data structure integrity."""
if not self.map[-3] < self.map[-1]:
print(self)
assert 0
def dump(self, file):
map = self.map
i = 0
n = len(map) - 1
while i < n:
self.dump_range(map[i], map[i + 2], map[i + 1], file)
i = i + 2
for event, set in self.special.items():
if set:
if not event:
event = 'empty'
self.dump_trans(event, set, file)
def dump_range(self, code0, code1, set, file):
if set:
if code0 == -maxint:
if code1 == maxint:
k = "any"
else:
k = "< %s" % self.dump_char(code1)
elif code1 == maxint:
k = "> %s" % self.dump_char(code0 - 1)
elif code0 == code1 - 1:
k = self.dump_char(code0)
else:
k = "%s..%s" % (self.dump_char(code0),
self.dump_char(code1 - 1))
self.dump_trans(k, set, file)
def dump_char(self, code):
if 0 <= code <= 255:
return repr(chr(code))
else:
return "chr(%d)" % code
def dump_trans(self, key, set, file):
file.write(" %s --> %s\n" % (key, self.dump_set(set)))
def dump_set(self, set):
return state_set_str(set)
#
# State set manipulation functions
#
#def merge_state_sets(set1, set2):
# for state in set2.keys():
# set1[state] = 1
def state_set_str(set):
state_list = set.keys()
str_list = []
for state in state_list:
str_list.append("S%d" % state.number)
return "[%s]" % string.join(str_list, ",")
#=======================================================================
#
# Python Lexical Analyser
#
#=======================================================================
"""
The Plex module provides lexical analysers with similar capabilities
to GNU Flex. The following classes and functions are exported;
see the attached docstrings for more information.
Scanner For scanning a character stream under the
direction of a Lexicon.
Lexicon For constructing a lexical definition
to be used by a Scanner.
Str, Any, AnyBut, AnyChar, Seq, Alt, Opt, Rep, Rep1,
Bol, Eol, Eof, Empty
Regular expression constructors, for building pattern
definitions for a Lexicon.
State For defining scanner states when creating a
Lexicon.
TEXT, IGNORE, Begin
Actions for associating with patterns when
creating a Lexicon.
"""
from Actions import TEXT, IGNORE, Begin
from Lexicons import Lexicon, State
from Regexps import RE, Seq, Alt, Rep1, Empty, Str, Any, AnyBut, AnyChar, Range
from Regexps import Opt, Rep, Bol, Eol, Eof, Case, NoCase
from Scanners import Scanner
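# Editor's illustrative sketch (not part of the original file): a minimal
# end-to-end use of the exported names. It assumes the sibling Lexicons
# module accepts a list of (pattern, action) pairs, as used elsewhere in
# Plex; defined as an uncalled helper so importing the package is unaffected.
def _example_plex_usage():
    from StringIO import StringIO
    word = Rep1(Range("azAZ"))
    space = Rep1(Any(" \t\n"))
    lexicon = Lexicon([
        (word, TEXT),      # return the matched text as the token value
        (space, IGNORE),   # skip whitespace
    ])
    scanner = Scanner(lexicon, StringIO("hello world"), "example")
    tokens = []
    while 1:
        value, text = scanner.read()
        if value is None:
            break
        tokens.append(value)
    assert tokens == ["hello", "world"]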
import sys
sys.stderr = sys.stdout
from TransitionMaps import TransitionMap
m = TransitionMap()
print m
def add(c, s):
print
print "adding", repr(c), "-->", repr(s)
m.add(c, s)
print m
print "keys:", m.keys()
add('a','alpha')
add('e', 'eta')
add('f', 'foo')
add('i', 'iota')
add('i', 'imp')
add('eol', 'elephant')
#
# Pyrex - Linux system interface
#
verbose = 0
gcc_pedantic = True
gcc_warnings_are_errors = True
gcc_all_warnings = True
import os, sys
from Cython.Utils import replace_suffix
from Cython.Compiler.Errors import PyrexError
version = "%s.%s" % sys.version[:2]
py_include_dirs = [
"%s/include/python%s" % (sys.prefix, version)
]
compilers = ["gcc", "g++"]
compiler_options = \
"-g -c -fno-strict-aliasing -Wno-long-double -no-cpp-precomp " \
"-mno-fused-madd -fno-common -dynamic " \
.split()
if gcc_pedantic:
compiler_options.extend(["-pedantic", "-Wno-long-long"])
if gcc_warnings_are_errors:
compiler_options.append("-Werror")
if gcc_all_warnings:
compiler_options.append("-Wall")
compiler_options.append("-Wno-unused-function")
linkers = ["gcc", "g++"]
linker_options = \
"-shared" \
.split()
class CCompilerError(PyrexError):
pass
def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
# Compile the given C source file to produce
# an object file. Returns the pathname of the
# resulting file.
c_file = os.path.join(os.getcwd(), c_file)
o_file = replace_suffix(c_file, obj_suffix)
include_options = []
for dir in py_include_dirs:
include_options.append("-I%s" % dir)
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
def c_link(obj_file, verbose_flag = 0, extra_objects = [], cplus = 0):
return c_link_list([obj_file] + extra_objects, verbose_flag, cplus)
def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
# Link the given object files into a dynamically
# loadable extension file. Returns the pathname
# of the resulting file.
out_file = replace_suffix(obj_files[0], ".so")
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file
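# Editor's illustrative sketch (not part of the original file): the intended
# usage is to compile a generated .c file and link the object into a
# loadable .so. Both steps shell out to gcc via os.spawnvp, so this helper
# is only a sketch and is never called here.
def _example_build_extension(c_source):
    obj_file = c_compile(c_source, verbose_flag = 1)
    return c_link(obj_file, verbose_flag = 1)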
#
# Cython -- Things that don't belong
# anywhere else in particular
#
import os, sys
def replace_suffix(path, newsuf):
base, _ = os.path.splitext(path)
return base + newsuf
def open_new_file(path):
# Open and truncate existing file to
# preserve metadata on the Mac.
return open(path, "w+")
def castrate_file(path, st):
# Remove junk contents from an output file after a
# failed compilation, but preserve metadata on Mac.
# Also sets access and modification times back to
# those specified by st (a stat struct).
try:
f = open(path, "r+")
except EnvironmentError:
pass
else:
#st = os.stat(path)
f.seek(0, 0)
f.truncate()
f.write(
"#error Do not use this file, it is the result of a failed Pyrex compilation.\n")
f.close()
if st:
os.utime(path, (st.st_atime, st.st_mtime))
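# Editor's illustrative sketch (not part of the original file): replace_suffix
# swaps only the final extension, leaving directory components untouched.
def _example_replace_suffix():
    assert replace_suffix("spam/eggs.pyx", ".c") == "spam/eggs.c"
    assert replace_suffix("module.o", ".so") == "module.so"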