Commit 719f7569 authored by Vitja Makarov

Merge remote branch 'upstream/master'

parents e7f05c6c 1f735bf4
......@@ -987,8 +987,8 @@ class BytesNode(ConstNode):
if not self.can_coerce_to_char_literal():
error(self.pos, "Only single-character string literals can be coerced into ints.")
return self
if dst_type is PyrexTypes.c_py_unicode_type:
error(self.pos, "Bytes literals cannot coerce to Py_UNICODE, use a unicode literal instead.")
if dst_type.is_unicode_char:
error(self.pos, "Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.")
return self
return CharNode(self.pos, value=self.value)
......@@ -1039,17 +1039,17 @@ class UnicodeNode(PyConstNode):
def coerce_to(self, dst_type, env):
if dst_type is self.type:
pass
elif dst_type is PyrexTypes.c_py_unicode_type:
elif dst_type.is_unicode_char:
if not self.can_coerce_to_char_literal():
error(self.pos, "Only single-character Unicode string literals can be coerced into Py_UNICODE.")
error(self.pos, "Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.")
return self
int_value = ord(self.value)
return IntNode(self.pos, value=int_value, constant_result=int_value)
return IntNode(self.pos, type=dst_type, value=str(int_value), constant_result=int_value)
elif not dst_type.is_pyobject:
if dst_type.is_string and self.bytes_value is not None:
# special case: '-3' enforced unicode literal used in a C char* context
return BytesNode(self.pos, value=self.bytes_value).coerce_to(dst_type, env)
error(self.pos, "Unicode literals do not support coercion to C types other than Py_UNICODE.")
error(self.pos, "Unicode literals do not support coercion to C types other than Py_UNICODE or Py_UCS4.")
elif dst_type is not py_object_type:
if not self.check_for_coercion_error(dst_type):
self.fail_assignment(dst_type)
......@@ -1057,6 +1057,9 @@ class UnicodeNode(PyConstNode):
def can_coerce_to_char_literal(self):
return len(self.value) == 1
## or (len(self.value) == 2
## and (0xD800 <= self.value[0] <= 0xDBFF)
## and (0xDC00 <= self.value[1] <= 0xDFFF))
def contains_surrogates(self):
# Check if the unicode string contains surrogate code points
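The updated error message above mentions surrogate pairs because, on narrow (UCS-2) Python builds, a single non-BMP code point is stored as two code units, so a "single character" unicode literal can legitimately have length 2; that is what the commented-out check hints at. A minimal sketch of that check as working Python (the helper name is made up for illustration, not part of the commit):

    # Hedged sketch: single-character test that also accepts a surrogate pair.
    def is_single_char_literal(value):
        if len(value) == 1:
            return True
        return (len(value) == 2
                and 0xD800 <= ord(value[0]) <= 0xDBFF   # high surrogate
                and 0xDC00 <= ord(value[1]) <= 0xDFFF)  # low surrogate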
......@@ -2021,6 +2024,13 @@ class IndexNode(ExprNode):
def is_ephemeral(self):
return self.base.is_ephemeral()
def is_simple(self):
if self.is_buffer_access:
return False
base = self.base
return (base.is_simple() and self.index.is_simple()
and base.type and (base.type.is_ptr or base.type.is_array))
def analyse_target_declaration(self, env):
pass
......@@ -2165,8 +2175,8 @@ class IndexNode(ExprNode):
elif not skip_child_analysis:
self.index.analyse_types(env)
self.original_index_type = self.index.type
if base_type is PyrexTypes.c_py_unicode_type:
# we infer Py_UNICODE for unicode strings in some
if base_type.is_unicode_char:
# we infer Py_UNICODE/Py_UCS4 for unicode strings in some
# cases, but indexing must still work for them
if self.index.constant_result in (0, -1):
# FIXME: we know that this node is redundant -
......@@ -2188,7 +2198,7 @@ class IndexNode(ExprNode):
self.index = self.index.coerce_to_pyobject(env)
self.is_temp = 1
if self.index.type.is_int and base_type is unicode_type:
# Py_UNICODE will automatically coerce to a unicode string
# Py_UNICODE/Py_UCS4 will automatically coerce to a unicode string
# if required, so this is fast and safe
self.type = PyrexTypes.c_py_unicode_type
elif is_slice and base_type in (bytes_type, str_type, unicode_type, list_type, tuple_type):
......@@ -2253,7 +2263,7 @@ class IndexNode(ExprNode):
return "PyList_GET_ITEM(%s, %s)" % (self.base.result(), self.index.result())
elif self.base.type is tuple_type:
return "PyTuple_GET_ITEM(%s, %s)" % (self.base.result(), self.index.result())
elif self.base.type is unicode_type and self.type is PyrexTypes.c_py_unicode_type:
elif self.base.type is unicode_type and self.type.is_unicode_char:
return "PyUnicode_AS_UNICODE(%s)[%s]" % (self.base.result(), self.index.result())
elif (self.type.is_ptr or self.type.is_array) and self.type == self.base.type:
error(self.pos, "Invalid use of pointer slice")
......@@ -2332,7 +2342,7 @@ class IndexNode(ExprNode):
self.result(),
code.error_goto(self.pos)))
code.put_gotref(self.py_result())
elif self.type is PyrexTypes.c_py_unicode_type and self.base.type is unicode_type:
elif self.type.is_unicode_char and self.base.type is unicode_type:
assert self.index.type.is_int
index_code = self.index.result()
function = "__Pyx_GetItemInt_Unicode"
......@@ -2688,32 +2698,26 @@ class SliceNode(ExprNode):
# stop ExprNode
# step ExprNode
subexprs = ['start', 'stop', 'step']
type = py_object_type
is_temp = 1
def calculate_constant_result(self):
self.constant_result = self.base.constant_result[
self.start.constant_result : \
self.stop.constant_result : \
self.step.constant_result]
self.constant_result = slice(
self.start.constant_result,
self.stop.constant_result,
self.step.constant_result)
def compile_time_value(self, denv):
start = self.start.compile_time_value(denv)
if self.stop is None:
stop = None
else:
stop = self.stop.compile_time_value(denv)
if self.step is None:
step = None
else:
step = self.step.compile_time_value(denv)
stop = self.stop.compile_time_value(denv)
step = self.step.compile_time_value(denv)
try:
return slice(start, stop, step)
except Exception, e:
self.compile_time_value_error(e)
subexprs = ['start', 'stop', 'step']
def analyse_types(self, env):
self.start.analyse_types(env)
self.stop.analyse_types(env)
......@@ -2721,10 +2725,21 @@ class SliceNode(ExprNode):
self.start = self.start.coerce_to_pyobject(env)
self.stop = self.stop.coerce_to_pyobject(env)
self.step = self.step.coerce_to_pyobject(env)
if self.start.is_literal and self.stop.is_literal and self.step.is_literal:
self.is_literal = True
self.is_temp = False
gil_message = "Constructing Python slice object"
def calculate_result_code(self):
return self.result_code
def generate_result_code(self, code):
if self.is_literal:
self.result_code = code.get_py_const(py_object_type, 'slice_', cleanup_level=2)
code = code.get_cached_constants_writer()
code.mark_pos(self.pos)
code.putln(
"%s = PySlice_New(%s, %s, %s); %s" % (
self.result(),
......@@ -2733,6 +2748,8 @@ class SliceNode(ExprNode):
self.step.py_result(),
code.error_goto_if_null(self.result(), self.pos)))
code.put_gotref(self.py_result())
if self.is_literal:
code.put_giveref(self.py_result())
class CallNode(ExprNode):
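The new is_literal path above lets a slice whose start, stop and step are all literals be emitted once into the module's cached constants (via get_py_const and the cached-constants writer) instead of calling PySlice_New on every evaluation. Roughly, in Python terms (a sketch of the effect, not the generated C):

    # A literal slice such as seq[1:10:2] behaves as if the slice object were
    # a module-level constant that every evaluation reuses.
    _slice_const = slice(1, 10, 2)      # built once, e.g. at module init time

    def take_some(seq):
        return seq[_slice_const]        # no new slice object per invocation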
......@@ -2920,6 +2937,13 @@ class SimpleCallNode(CallNode):
func_type = func_type.base_type
return func_type
def is_simple(self):
# C function calls could be considered simple, but they may
# have side-effects that may hit when multiple operations must
# be effected in order, e.g. when constructing the argument
# sequence for a function call or comparing values.
return False
def analyse_c_function_call(self, env):
if self.function.type is error_type:
self.type = error_type
......@@ -2958,17 +2982,32 @@ class SimpleCallNode(CallNode):
self.has_optional_args = 1
self.is_temp = 1
# Coerce arguments
for i in range(min(max_nargs, actual_nargs)):
some_args_in_temps = False
for i in xrange(min(max_nargs, actual_nargs)):
formal_type = func_type.args[i].type
arg = self.args[i].coerce_to(formal_type, env)
if arg.type.is_pyobject and not env.nogil and (arg.is_attribute or not arg.is_simple):
# we do not own the argument's reference, but we must
# make sure it cannot be collected before we return
# from the function, so we create an owned temp
# reference to it
arg = arg.coerce_to_temp(env)
if arg.is_temp:
if i > 0:
# first argument in temp doesn't impact subsequent arguments
some_args_in_temps = True
elif arg.type.is_pyobject and not env.nogil:
if i == 0 and self.self is not None:
# a method's cloned "self" argument is ok
pass
elif arg.is_name and arg.entry and arg.entry.is_local and not arg.entry.in_closure:
# plain local variables are ok
pass
else:
# we do not safely own the argument's reference,
# but we must make sure it cannot be collected
# before we return from the function, so we create
# an owned temp reference to it
if i > 0: # first argument doesn't matter
some_args_in_temps = True
arg = arg.coerce_to_temp(env)
self.args[i] = arg
for i in range(max_nargs, actual_nargs):
# handle additional varargs parameters
for i in xrange(max_nargs, actual_nargs):
arg = self.args[i]
if arg.type.is_pyobject:
arg_ctype = arg.type.default_coerced_ctype()
......@@ -2976,7 +3015,31 @@ class SimpleCallNode(CallNode):
error(self.args[i].pos,
"Python object cannot be passed as a varargs parameter")
else:
self.args[i] = arg.coerce_to(arg_ctype, env)
self.args[i] = arg = arg.coerce_to(arg_ctype, env)
if arg.is_temp and i > 0:
some_args_in_temps = True
if some_args_in_temps:
# if some args are temps and others are not, they may get
# constructed in the wrong order (temps first) => make
# sure they are either all temps or all not temps (except
# for the last argument, which is evaluated last in any
# case)
for i in xrange(actual_nargs-1):
if i == 0 and self.self is not None:
continue # self is ok
arg = self.args[i]
if arg.is_name and arg.entry and (
(arg.entry.is_local and not arg.entry.in_closure)
or arg.entry.type.is_cfunction):
# local variables and C functions are safe
pass
elif env.nogil and arg.type.is_pyobject:
# can't copy a Python reference into a temp in nogil
# env (this is safe: a construction would fail in
# nogil anyway)
pass
else:
self.args[i] = arg.coerce_to_temp(env)
# Calc result type and code fragment
if isinstance(self.function, NewExprNode):
self.type = PyrexTypes.CPtrType(self.function.class_type)
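The some_args_in_temps bookkeeping above preserves Python's left-to-right evaluation order: once any argument before the last one ends up in a temporary, the remaining non-trivial arguments are forced into temporaries as well, so their side effects cannot be reordered when the call is emitted as a plain C function call. The arg_order/test_c_call test near the end of this diff exercises exactly this; a plain-Python sketch of the guarantee being protected:

    # Sketch of the ordering hazard (plain Python, for illustration only).
    order = []

    def f():
        order.append(1)
        return 1

    def g():
        order.append(2)
        return 2

    def call_both():
        # Cython must ensure f() runs before g() even when this compiles to a
        # C call, where argument evaluation order is otherwise unspecified;
        # hence earlier arguments are pre-evaluated into temps.
        return (f(), g())   # afterwards, order == [1, 2]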
......@@ -3979,6 +4042,10 @@ class TupleNode(SequenceNode):
self.is_temp = 0
self.is_literal = 1
def is_simple(self):
# either temp or constant => always simple
return True
def calculate_result_code(self):
if len(self.args) > 0:
return self.result_code
......@@ -5413,6 +5480,10 @@ class TypecastNode(ExprNode):
elif self.type.is_complex and self.operand.type.is_complex:
self.operand = self.operand.coerce_to_simple(env)
def is_simple(self):
# either temp or a C cast => no side effects
return True
def nogil_check(self, env):
if self.type and self.type.is_pyobject and self.is_temp:
self.gil_error()
......@@ -5843,8 +5914,8 @@ class NumBinopNode(BinopNode):
self.operand2.result())
def is_py_operation_types(self, type1, type2):
return (type1 is PyrexTypes.c_py_unicode_type or
type2 is PyrexTypes.c_py_unicode_type or
return (type1.is_unicode_char or
type2.is_unicode_char or
BinopNode.is_py_operation_types(self, type1, type2))
def py_operation_function(self):
......@@ -6501,7 +6572,7 @@ class CmpNode(object):
return self.operator in ('in', 'not_in') and \
((self.operand1.type.is_int
and (self.operand2.type.is_string or self.operand2.type is bytes_type)) or
(self.operand1.type is PyrexTypes.c_py_unicode_type
(self.operand1.type.is_unicode_char
and self.operand2.type is unicode_type))
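With this change, a containment test of a Py_UCS4/Py_UNICODE value in a unicode string also qualifies as a C-level operation; the tests later in this diff assert that such loops compile to a SwitchStatNode, i.e. direct character comparisons rather than a Python-level containment call. Roughly, in Python terms (a sketch only):

    # 'c in u"abCDefGh"' with c typed as a unicode character can be lowered to
    # direct comparisons instead of a Python __contains__ call.
    def contains_c(c):
        return (c == u'a' or c == u'b' or c == u'C' or c == u'D'
                or c == u'e' or c == u'f' or c == u'G' or c == u'h')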
def is_ptr_contains(self):
......@@ -7069,6 +7140,9 @@ class PyTypeTestNode(CoercionNode):
return False
return self.arg.may_be_none()
def is_simple(self):
return self.arg.is_simple()
def result_in_temp(self):
return self.arg.result_in_temp()
......@@ -7119,6 +7193,9 @@ class NoneCheckNode(CoercionNode):
def may_be_none(self):
return False
def is_simple(self):
return self.arg.is_simple()
def result_in_temp(self):
return self.arg.result_in_temp()
......@@ -7158,7 +7235,7 @@ class CoerceToPyTypeNode(CoercionNode):
# be specific about some known types
if arg.type.is_string:
self.type = bytes_type
elif arg.type is PyrexTypes.c_py_unicode_type:
elif arg.type.is_unicode_char:
self.type = unicode_type
elif arg.type.is_complex:
self.type = Builtin.complex_type
......@@ -7419,6 +7496,9 @@ class CloneNode(CoercionNode):
if hasattr(self.arg, 'entry'):
self.entry = self.arg.entry
def is_simple(self):
return True # result is always in a temp (or a name)
def generate_evaluation_code(self, code):
pass
......
......@@ -607,9 +607,6 @@ class CFuncDeclaratorNode(CDeclaratorNode):
error(self.exception_value.pos,
"Exception value incompatible with function return type")
exc_check = self.exception_check
if return_type.is_array:
error(self.pos,
"Function cannot return an array")
if return_type.is_cfunction:
error(self.pos,
"Function cannot return a function")
......@@ -1658,6 +1655,9 @@ class CFuncDefNode(FuncDefNode):
api = self.api, modifiers = self.modifiers)
self.entry.inline_func_in_pxd = self.inline_in_pxd
self.return_type = type.return_type
if self.return_type.is_array and visibility != 'extern':
error(self.pos,
"Function cannot return an array")
if self.overridable and not env.is_module_scope:
if len(self.args) < 1 or not self.args[0].type.is_pyobject:
......
......@@ -1937,7 +1937,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
node.pos, cfunc_name, self.PyObject_Size_func_type,
args = [arg],
is_temp = node.is_temp)
elif arg.type is PyrexTypes.c_py_unicode_type:
elif arg.type.is_unicode_char:
return ExprNodes.IntNode(node.pos, value='1', constant_result=1,
type=node.type)
else:
......@@ -1995,20 +1995,21 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
builtin_type = entry.type
if builtin_type and builtin_type is not Builtin.type_type:
type_check_function = entry.type.type_check_function(exact=False)
if type_check_function in tests:
continue
tests.append(type_check_function)
type_check_args = [arg]
elif test_type_node.type is Builtin.type_type:
type_check_function = '__Pyx_TypeCheck'
type_check_args = [arg, test_type_node]
else:
return node
if type_check_function not in tests:
tests.append(type_check_function)
test_nodes.append(
ExprNodes.PythonCapiCallNode(
test_type_node.pos, type_check_function, self.Py_type_check_func_type,
args = type_check_args,
is_temp = True,
))
test_nodes.append(
ExprNodes.PythonCapiCallNode(
test_type_node.pos, type_check_function, self.Py_type_check_func_type,
args = type_check_args,
is_temp = True,
))
def join_with_or(a,b, make_binop_node=ExprNodes.binop_node):
or_node = make_binop_node(node.pos, 'or', a, b)
......@@ -2028,7 +2029,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
return node
arg = pos_args[0]
if isinstance(arg, ExprNodes.CoerceToPyTypeNode):
if arg.arg.type is PyrexTypes.c_py_unicode_type:
if arg.arg.type.is_unicode_char:
return arg.arg.coerce_to(node.type, self.current_env())
return node
......@@ -2191,7 +2192,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
return node
ustring = args[0]
if not isinstance(ustring, ExprNodes.CoerceToPyTypeNode) or \
ustring.arg.type is not PyrexTypes.c_py_unicode_type:
not ustring.arg.type.is_unicode_char:
return node
uchar = ustring.arg
method_name = node.function.attribute
......@@ -2230,7 +2231,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
return node
ustring = args[0]
if not isinstance(ustring, ExprNodes.CoerceToPyTypeNode) or \
ustring.arg.type is not PyrexTypes.c_py_unicode_type:
not ustring.arg.type.is_unicode_char:
return node
uchar = ustring.arg
method_name = node.function.attribute
......
......@@ -2049,6 +2049,7 @@ basic_c_type_names = ("void", "char", "int", "float", "double", "bint")
special_basic_c_types = {
# name : (signed, longness)
"Py_UNICODE" : (0, 0),
"Py_UCS4" : (0, 0),
"Py_ssize_t" : (2, 0),
"ssize_t" : (2, 0),
"size_t" : (0, 0),
......
......@@ -49,6 +49,7 @@ class PyrexType(BaseType):
# is_typedef boolean Is a typedef type
# is_string boolean Is a C char * type
# is_unicode boolean Is a UTF-8 encoded C char * type
# is_unicode_char boolean Is either Py_UCS4 or Py_UNICODE
# is_returncode boolean Is used only to signal exceptions
# is_error boolean Is the dummy error type
# is_buffer boolean Is buffer access type
......@@ -101,6 +102,7 @@ class PyrexType(BaseType):
is_typedef = 0
is_string = 0
is_unicode = 0
is_unicode_char = 0
is_returncode = 0
is_error = 0
is_buffer = 0
......@@ -924,9 +926,81 @@ class CBIntType(CIntType):
return "<CNumericType bint>"
class CPyUCS4IntType(CIntType):
# Py_UCS4
is_unicode_char = True
# Py_UCS4 coerces from and to single character unicode strings (or
# at most two characters on 16bit Unicode builds), but we also
# allow Python integers as input. The value range for Py_UCS4
# is 0..1114111, which is checked when converting from an integer
# value.
to_py_function = "PyUnicode_FromOrdinal"
from_py_function = "__Pyx_PyObject_AsPy_UCS4"
def create_from_py_utility_code(self, env):
env.use_utility_code(pyobject_as_py_ucs4_utility_code)
return True
def sign_and_name(self):
return "Py_UCS4"
pyobject_as_py_ucs4_utility_code = UtilityCode(
proto='''
static CYTHON_INLINE Py_UCS4 __Pyx_PyObject_AsPy_UCS4(PyObject*);
''',
impl='''
static CYTHON_INLINE Py_UCS4 __Pyx_PyObject_AsPy_UCS4(PyObject* x) {
long ival;
if (PyUnicode_Check(x)) {
if (likely(PyUnicode_GET_SIZE(x) == 1)) {
return PyUnicode_AS_UNICODE(x)[0];
}
#if Py_UNICODE_SIZE == 2
else if (PyUnicode_GET_SIZE(x) == 2) {
Py_UCS4 high_val = PyUnicode_AS_UNICODE(x)[0];
if (high_val >= 0xD800 && high_val <= 0xDBFF) {
Py_UCS4 low_val = PyUnicode_AS_UNICODE(x)[1];
if (low_val >= 0xDC00 && low_val <= 0xDFFF) {
return 0x10000 | ((high_val & ((1<<10)-1)) << 10) | (low_val & ((1<<10)-1));
}
}
}
#endif
PyErr_Format(PyExc_ValueError,
"only single character unicode strings can be converted to Py_UCS4, got length "
#if PY_VERSION_HEX < 0x02050000
"%d",
#else
"%zd",
#endif
PyUnicode_GET_SIZE(x));
return (Py_UCS4)-1;
}
ival = __Pyx_PyInt_AsLong(x);
if (unlikely(ival < 0)) {
if (!PyErr_Occurred())
PyErr_SetString(PyExc_OverflowError,
"cannot convert negative value to Py_UCS4");
return (Py_UCS4)-1;
} else if (unlikely(ival > 1114111)) {
PyErr_SetString(PyExc_OverflowError,
"value too large to convert to Py_UCS4");
return (Py_UCS4)-1;
}
return (Py_UCS4)ival;
}
''')
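The #if Py_UNICODE_SIZE == 2 branch above reassembles a non-BMP code point from a UTF-16 surrogate pair on narrow builds. The same arithmetic, checked in Python for one concrete value (chosen only for illustration):

    # U+10437 is encoded in UTF-16 as the surrogate pair D801 DC37.
    high, low = 0xD801, 0xDC37
    code_point = 0x10000 | ((high & ((1 << 10) - 1)) << 10) | (low & ((1 << 10) - 1))
    assert code_point == 0x10437   # within Py_UCS4's accepted range 0..1114111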
class CPyUnicodeIntType(CIntType):
# Py_UNICODE
is_unicode_char = True
# Py_UNICODE coerces from and to single character unicode strings,
# but we also allow Python integers as input. The value range for
# Py_UNICODE is 0..1114111, which is checked when converting from
......@@ -2306,6 +2380,7 @@ c_anon_enum_type = CAnonEnumType(-1)
c_returncode_type = CReturnCodeType(RANK_INT)
c_bint_type = CBIntType(RANK_INT)
c_py_unicode_type = CPyUnicodeIntType(RANK_INT-0.5, UNSIGNED)
c_py_ucs4_type = CPyUCS4IntType(RANK_LONG-0.5, UNSIGNED)
c_py_ssize_t_type = CPySSizeTType(RANK_LONG+0.5, SIGNED)
c_ssize_t_type = CSSizeTType(RANK_LONG+0.5, SIGNED)
c_size_t_type = CSizeTType(RANK_LONG+0.5, UNSIGNED)
......@@ -2367,6 +2442,7 @@ modifiers_and_name_to_type = {
(1, 0, "bint"): c_bint_type,
(0, 0, "Py_UNICODE"): c_py_unicode_type,
(0, 0, "Py_UCS4"): c_py_ucs4_type,
(2, 0, "Py_ssize_t"): c_py_ssize_t_type,
(2, 0, "ssize_t") : c_ssize_t_type,
(0, 0, "size_t") : c_size_t_type,
......@@ -2614,6 +2690,8 @@ def parse_basic_type(name):
longness = 0
if name == 'Py_UNICODE':
signed = 0
elif name == 'Py_UCS4':
signed = 0
elif name == 'Py_ssize_t':
signed = 2
elif name == 'ssize_t':
......
......@@ -29,6 +29,49 @@ with open(codefile) as f:
# can't access the module anymore. Get it from sys.modules instead.
build_ext = sys.modules['Cython.Distutils.build_ext']
have_gdb = None
def test_gdb():
global have_gdb
if have_gdb is None:
try:
p = subprocess.Popen(['gdb', '-v'], stdout=subprocess.PIPE)
have_gdb = True
except OSError:
# gdb was not installed
have_gdb = False
else:
gdb_version = p.stdout.read().decode('ascii')
p.wait()
p.stdout.close()
if have_gdb:
# Based on Lib/test/test_gdb.py
regex = "^GNU gdb [^\d]*(\d+)\.(\d+)"
gdb_version_number = list(map(int, re.search(regex, gdb_version).groups()))
if gdb_version_number >= [7, 2]:
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
python_version_script.write(
'python import sys; print("%s %s" % sys.version_info[:2])')
python_version_script.flush()
p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name],
stdout=subprocess.PIPE)
python_version = p.stdout.read().decode('ascii')
p.wait()
python_version_number = list(map(int, python_version.split()))
# Be Python 3 compatible
if (not have_gdb
or gdb_version_number < [7, 2]
or python_version_number < [2, 6]):
warnings.warn(
'Skipping gdb tests, need gdb >= 7.2 with Python >= 2.6')
have_gdb = False
return have_gdb
class DebuggerTestCase(unittest.TestCase):
def setUp(self):
......@@ -36,6 +79,9 @@ class DebuggerTestCase(unittest.TestCase):
Run gdb and have cygdb import the debug information from the code
defined in TestParseTreeTransforms's setUp method
"""
if not test_gdb():
return
self.tempdir = tempfile.mkdtemp()
self.destfile = os.path.join(self.tempdir, 'codefile.pyx')
self.debug_dest = os.path.join(self.tempdir,
......@@ -44,62 +90,77 @@ class DebuggerTestCase(unittest.TestCase):
self.cfuncs_destfile = os.path.join(self.tempdir, 'cfuncs')
self.cwd = os.getcwd()
os.chdir(self.tempdir)
shutil.copy(codefile, self.destfile)
shutil.copy(cfuncs_file, self.cfuncs_destfile + '.c')
compiler = ccompiler.new_compiler()
compiler.compile(['cfuncs.c'], debug=True, extra_postargs=['-fPIC'])
opts = dict(
test_directory=self.tempdir,
module='codefile',
)
optimization_disabler = build_ext.Optimization()
optimization_disabler.disable_optimization()
cython_compile_testcase = runtests.CythonCompileTestCase(
workdir=self.tempdir,
# we clean up everything (not only compiled files)
cleanup_workdir=False,
**opts
)
cython_compile_testcase.run_cython(
targetdir=self.tempdir,
incdir=None,
annotate=False,
extra_compile_options={
'gdb_debug':True,
'output_dir':self.tempdir,
},
**opts
)
cython_compile_testcase.run_distutils(
incdir=None,
workdir=self.tempdir,
extra_extension_args={'extra_objects':['cfuncs.o']},
**opts
)
optimization_disabler.restore_state()
# ext = Cython.Distutils.extension.Extension(
# 'codefile',
# ['codefile.pyx'],
# pyrex_gdb=True,
# extra_objects=['cfuncs.o'])
#
# distutils.core.setup(
# script_args=['build_ext', '--inplace'],
# ext_modules=[ext],
# cmdclass=dict(build_ext=Cython.Distutils.build_ext)
# )
try:
os.chdir(self.tempdir)
shutil.copy(codefile, self.destfile)
shutil.copy(cfuncs_file, self.cfuncs_destfile + '.c')
compiler = ccompiler.new_compiler()
compiler.compile(['cfuncs.c'], debug=True, extra_postargs=['-fPIC'])
opts = dict(
test_directory=self.tempdir,
module='codefile',
)
optimization_disabler = build_ext.Optimization()
cython_compile_testcase = runtests.CythonCompileTestCase(
workdir=self.tempdir,
# we clean up everything (not only compiled files)
cleanup_workdir=False,
**opts
)
new_stderr = open(os.devnull, 'w')
stderr = sys.stderr
sys.stderr = new_stderr
optimization_disabler.disable_optimization()
try:
cython_compile_testcase.run_cython(
targetdir=self.tempdir,
incdir=None,
annotate=False,
extra_compile_options={
'gdb_debug':True,
'output_dir':self.tempdir,
},
**opts
)
cython_compile_testcase.run_distutils(
incdir=None,
workdir=self.tempdir,
extra_extension_args={'extra_objects':['cfuncs.o']},
**opts
)
finally:
optimization_disabler.restore_state()
sys.stderr = stderr
# ext = Cython.Distutils.extension.Extension(
# 'codefile',
# ['codefile.pyx'],
# pyrex_gdb=True,
# extra_objects=['cfuncs.o'])
#
# distutils.core.setup(
# script_args=['build_ext', '--inplace'],
# ext_modules=[ext],
# cmdclass=dict(build_ext=Cython.Distutils.build_ext)
# )
except:
os.chdir(self.cwd)
raise
def tearDown(self):
if not test_gdb():
return
os.chdir(self.cwd)
shutil.rmtree(self.tempdir)
......@@ -107,6 +168,9 @@ class DebuggerTestCase(unittest.TestCase):
class GdbDebuggerTestCase(DebuggerTestCase):
def setUp(self):
if not test_gdb():
return
super(GdbDebuggerTestCase, self).setUp()
prefix_code = textwrap.dedent('''\
......@@ -166,25 +230,26 @@ class GdbDebuggerTestCase(DebuggerTestCase):
p.wait()
p.stdout.close()
if have_gdb:
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
python_version_script.write(
'python import sys; print("%s %s" % sys.version_info[:2])')
python_version_script.flush()
p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name],
stdout=subprocess.PIPE)
python_version = p.stdout.read().decode('ascii')
p.wait()
python_version_number = [int(a) for a in python_version.split()]
if have_gdb:
# Based on Lib/test/test_gdb.py
regex = "^GNU gdb [^\d]*(\d+)\.(\d+)"
gdb_version_number = re.search(regex, gdb_version).groups()
gdb_version_number = list(map(int, re.search(regex, gdb_version).groups()))
if gdb_version_number >= [7, 2]:
python_version_script = tempfile.NamedTemporaryFile(mode='w+')
python_version_script.write(
'python import sys; print("%s %s" % sys.version_info[:2])')
python_version_script.flush()
p = subprocess.Popen(['gdb', '-batch', '-x', python_version_script.name],
stdout=subprocess.PIPE)
python_version = p.stdout.read().decode('ascii')
p.wait()
python_version_number = list(map(int, python_version.split()))
# Be Python 3 compatible
if (not have_gdb
or list(map(int, gdb_version_number)) < [7, 2]
or gdb_version_number < [7, 2]
or python_version_number < [2, 6]):
self.p = None
warnings.warn(
......@@ -197,6 +262,9 @@ class GdbDebuggerTestCase(DebuggerTestCase):
env=env)
def tearDown(self):
if not test_gdb():
return
super(GdbDebuggerTestCase, self).tearDown()
if self.p:
self.p.stderr.close()
......@@ -207,17 +275,24 @@ class GdbDebuggerTestCase(DebuggerTestCase):
class TestAll(GdbDebuggerTestCase):
def test_all(self):
if self.p is None:
if not test_gdb():
return
out, err = self.p.communicate()
err = err.decode('UTF-8')
border = '*' * 30
start = '%s v INSIDE GDB v %s' % (border, border)
end = '%s ^ INSIDE GDB ^ %s' % (border, border)
errmsg = '\n%s\n%s%s' % (start, err, end)
self.assertEquals(0, self.p.wait(), errmsg)
sys.stderr.write(err)
exit_status = self.p.wait()
if exit_status == 1:
sys.stderr.write(err)
elif exit_status >= 2:
border = '*' * 30
start = '%s v INSIDE GDB v %s' % (border, border)
end = '%s ^ INSIDE GDB ^ %s' % (border, border)
errmsg = '\n%s\n%s%s' % (start, err, end)
sys.stderr.write(errmsg)
if __name__ == '__main__':
unittest.main()
......@@ -428,7 +428,7 @@ def run_unittest_in_module(modulename):
"debugging information. Either compile python with "
"-g or get a debug build (configure with --with-pydebug).")
warnings.warn(msg)
os._exit(0)
os._exit(1)
else:
m = __import__(modulename, fromlist=[''])
tests = inspect.getmembers(m, inspect.isclass)
......@@ -453,7 +453,7 @@ def runtests():
success_libpython = run_unittest_in_module(test_libpython_in_gdb.__name__)
if not success_libcython or not success_libpython:
sys.exit(1)
sys.exit(2)
def main(version, trace_code=False):
global inferior_python_version
......
......@@ -194,7 +194,7 @@ class _XMLTestResult(_TextTestResult):
module = ''
testcase_name = module + testcase.__name__
if not tests_by_testcase.has_key(testcase_name):
if testcase_name not in tests_by_testcase:
tests_by_testcase[testcase_name] = []
tests_by_testcase[testcase_name].append(test_info)
......
__version__ = "0.14.1rc2"
__version__ = "0.14.1rc3"
# Void cython.* directives (for case insensitive operating systems).
from Cython.Shadow import *
......@@ -26,7 +26,6 @@ try:
except ImportError: # No threads, no problems
threading = None
WITH_CYTHON = True
from distutils.dist import Distribution
......@@ -34,6 +33,18 @@ from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
distutils_distro = Distribution()
if sys.platform == 'win32':
# TODO: Figure out why this hackery (see http://thread.gmane.org/gmane.comp.python.cython.devel/8280/).
config_files = distutils_distro.find_config_files()
try: config_files.remove('setup.cfg')
except ValueError: pass
distutils_distro.parse_config_files(config_files)
cfgfiles = distutils_distro.find_config_files()
try: cfgfiles.remove('setup.cfg')
except ValueError: pass
distutils_distro.parse_config_files(cfgfiles)
TEST_DIRS = ['compile', 'errors', 'run', 'wrappers', 'pyregr', 'build']
TEST_RUN_DIRS = ['run', 'wrappers', 'pyregr']
......@@ -981,6 +992,9 @@ def check_thread_termination(ignore_seen=True):
raise PendingThreadsError("left-over threads found after running test")
def main():
DISTDIR = os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]))
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--no-cleanup", dest="cleanup_workdir",
......@@ -1051,12 +1065,15 @@ def main():
parser.add_option("--exit-ok", dest="exit_ok", default=False,
action="store_true",
help="exit without error code even on test failures")
parser.add_option("--root-dir", dest="root_dir", default=os.path.join(DISTDIR, 'tests'),
help="working directory")
parser.add_option("--work-dir", dest="work_dir", default=os.path.join(os.getcwd(), 'BUILD'),
help="working directory")
options, cmd_args = parser.parse_args()
DISTDIR = os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]))
ROOTDIR = os.path.join(DISTDIR, 'tests')
WORKDIR = os.path.join(os.getcwd(), 'BUILD')
ROOTDIR = os.path.abspath(options.root_dir)
WORKDIR = os.path.abspath(options.work_dir)
if sys.version_info[0] >= 3:
options.doctests = False
......@@ -1167,7 +1184,7 @@ def main():
exclude_selectors += [ re.compile(r, re.I|re.U).search for r in options.exclude ]
if not test_bugs:
exclude_selectors += [ FileListExcluder("tests/bugs.txt") ]
exclude_selectors += [ FileListExcluder(os.path.join(ROOTDIR, "bugs.txt")) ]
if sys.platform in ['win32', 'cygwin'] and sys.version_info < (2,6):
exclude_selectors += [ lambda x: x == "run.specialfloat" ]
......
......@@ -13,5 +13,5 @@ _ERRORS = """
2:14: Only single-character string literals can be coerced into ints.
3:14: Only single-character string literals can be coerced into ints.
6:15: Only single-character string literals can be coerced into ints.
9:14: Unicode literals do not support coercion to C types other than Py_UNICODE.
9:14: Unicode literals do not support coercion to C types other than Py_UNICODE or Py_UCS4.
"""
# -*- coding: iso-8859-1 -*-
cdef Py_UCS4 char_ASCII = u'A'
cdef Py_UCS4 char_KLINGON = u'\uF8D2'
def char_too_long_ASCII():
cdef Py_UCS4 c = u'AB'
def char_too_long_Unicode():
cdef Py_UCS4 c = u'A\uF8D2'
def char_too_long_bytes():
cdef Py_UCS4 c = b'AB'
def char_too_long_latin1():
cdef Py_UCS4 char_bytes_latin1 = b'\xf6'
_ERRORS = """
7:21: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
10:21: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
13:21: Only single-character string literals can be coerced into ints.
16:37: Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.
"""
......@@ -17,8 +17,8 @@ def char_too_long_latin1():
_ERRORS = """
7:24: Only single-character Unicode string literals can be coerced into Py_UNICODE.
10:24: Only single-character Unicode string literals can be coerced into Py_UNICODE.
7:24: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
10:24: Only single-character Unicode string literals or surrogate pairs can be coerced into Py_UCS4/Py_UNICODE.
13:24: Only single-character string literals can be coerced into ints.
16:40: Bytes literals cannot coerce to Py_UNICODE, use a unicode literal instead.
16:40: Bytes literals cannot coerce to Py_UNICODE/Py_UCS4, use a unicode literal instead.
"""
......@@ -50,7 +50,7 @@ cdef list l_f2 = b1
cdef list l_f3 = u1
_ERRORS = u"""
25:20: Unicode literals do not support coercion to C types other than Py_UNICODE.
25:20: Unicode literals do not support coercion to C types other than Py_UNICODE or Py_UCS4.
26:22: Unicode objects do not support coercion to C types.
27:22: 'str' objects do not support coercion to C types (use 'bytes'?).
......
......@@ -77,6 +77,23 @@ def test_custom():
assert isinstance(A(), A)
return True
cdef class B:
pass
cdef class C:
pass
def test_custom_tuple(obj):
"""
>>> test_custom_tuple(A())
True
>>> test_custom_tuple(B())
True
>>> test_custom_tuple(C())
False
"""
return isinstance(obj, (A,B))
def test_nested(x):
"""
>>> test_nested(1)
......
# -*- coding: iso-8859-1 -*-
cimport cython
cdef Py_UCS4 char_ASCII = u'A'
cdef Py_UCS4 char_KLINGON = u'\uF8D2'
def compare_ASCII():
"""
>>> compare_ASCII()
True
False
False
"""
print(char_ASCII == u'A')
print(char_ASCII == u'B')
print(char_ASCII == u'\uF8D2')
def compare_klingon():
"""
>>> compare_klingon()
True
False
False
"""
print(char_KLINGON == u'\uF8D2')
print(char_KLINGON == u'A')
print(char_KLINGON == u'B')
from cpython.unicode cimport PyUnicode_FromOrdinal
import sys
u0 = u'\x00'
u1 = u'\x01'
umax = PyUnicode_FromOrdinal(sys.maxunicode)
def unicode_ordinal(Py_UCS4 i):
"""
>>> ord(unicode_ordinal(0)) == 0
True
>>> ord(unicode_ordinal(1)) == 1
True
>>> ord(unicode_ordinal(sys.maxunicode)) == sys.maxunicode
True
>>> ord(unicode_ordinal(u0)) == 0
True
>>> ord(unicode_ordinal(u1)) == 1
True
>>> ord(unicode_ordinal(umax)) == sys.maxunicode
True
Value too small:
>>> unicode_ordinal(-1) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
OverflowError: ...
Value too large:
>>> unicode_ordinal(1114111+1) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
OverflowError: ...
Less than one character:
>>> unicode_ordinal(u0[:0])
Traceback (most recent call last):
...
ValueError: only single character unicode strings can be converted to Py_UCS4, got length 0
More than one character:
>>> unicode_ordinal(u0+u1)
Traceback (most recent call last):
...
ValueError: only single character unicode strings can be converted to Py_UCS4, got length 2
"""
return i
@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_type_methods(Py_UCS4 uchar):
"""
>>> unicode_type_methods(ord('A'))
[True, True, False, False, False, False, False, True, True]
>>> unicode_type_methods(ord('a'))
[True, True, False, False, True, False, False, False, False]
>>> unicode_type_methods(ord('8'))
[True, False, True, True, False, True, False, False, False]
>>> unicode_type_methods(ord('\\t'))
[False, False, False, False, False, False, True, False, False]
"""
return [
# character types
uchar.isalnum(),
uchar.isalpha(),
uchar.isdecimal(),
uchar.isdigit(),
uchar.islower(),
uchar.isnumeric(),
uchar.isspace(),
uchar.istitle(),
uchar.isupper(),
]
@cython.test_assert_path_exists('//PythonCapiCallNode')
@cython.test_fail_if_path_exists('//SimpleCallNode')
def unicode_methods(Py_UCS4 uchar):
"""
>>> unicode_methods(ord('A')) == ['a', 'A', 'A']
True
>>> unicode_methods(ord('a')) == ['a', 'A', 'A']
True
"""
return [
# character conversion
uchar.lower(),
uchar.upper(),
uchar.title(),
]
@cython.test_assert_path_exists('//IntNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
'//PythonCapiCallNode')
def len_uchar(Py_UCS4 uchar):
"""
>>> len_uchar(ord('A'))
1
"""
return len(uchar)
def index_uchar(Py_UCS4 uchar, Py_ssize_t i):
"""
>>> index_uchar(ord('A'), 0) == ('A', 'A', 'A')
True
>>> index_uchar(ord('A'), -1) == ('A', 'A', 'A')
True
>>> index_uchar(ord('A'), 1)
Traceback (most recent call last):
IndexError: string index out of range
"""
return uchar[0], uchar[-1], uchar[i]
mixed_ustring = u'AbcDefGhIjKlmnoP'
lower_ustring = mixed_ustring.lower()
upper_ustring = mixed_ustring.upper()
@cython.test_assert_path_exists('//PythonCapiCallNode',
'//ForFromStatNode')
@cython.test_fail_if_path_exists('//SimpleCallNode',
'//ForInStatNode')
def count_lower_case_characters(unicode ustring):
"""
>>> count_lower_case_characters(mixed_ustring)
10
>>> count_lower_case_characters(lower_ustring)
16
"""
cdef Py_ssize_t count = 0
for uchar in ustring:
if uchar.islower():
count += 1
return count
@cython.test_assert_path_exists('//SwitchStatNode',
'//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def iter_and_in():
"""
>>> iter_and_in()
a
b
e
f
h
"""
for c in u'abcdefgh':
if c in u'abCDefGh':
print c
@cython.test_assert_path_exists('//SwitchStatNode',
'//ForFromStatNode')
@cython.test_fail_if_path_exists('//ForInStatNode')
def index_and_in():
"""
>>> index_and_in()
1
3
4
7
8
"""
cdef int i
for i in range(1,9):
if u'abcdefgh'[-i] in u'abCDefGh':
print i
# function call arguments
arg_order = []
cdef int f():
arg_order.append(1)
return 1
def g():
arg_order.append(2)
return 2
cdef call2(int x, object o):
return x, o
def test_c_call():
"""
>>> arg_order
[]
>>> test_c_call()
(1, 2)
>>> arg_order
[1, 2]
"""
return call2(f(), g())
# module globals
cdef object X = 1
cdef redefine_global():
global X
x,X = X,2
return x
cdef call3(object x1, int o, object x2):
return (x1, o, x2)
def test_global_redefine():
"""
>>> test_global_redefine()
(1, 1, 2)
"""
return call3(X, redefine_global(), X)