Commit afb995d7 authored by Kevin R. Thornton's avatar Kevin R. Thornton

Merge pull request #1 from cython/master

Merge upstream
parents e414aa44 01b47caa
......@@ -1324,7 +1324,7 @@ class GlobalState(object):
for py_string in c.py_strings.values():
py_strings.append((c.cname, len(py_string.cname), py_string))
for c, cname in self.pyunicode_ptr_const_index.items():
for c, cname in sorted(self.pyunicode_ptr_const_index.items()):
utf16_array, utf32_array = StringEncoding.encode_pyunicode_string(c)
if utf16_array:
# Narrow and wide representations differ
......
......@@ -3421,11 +3421,8 @@ class IndexNode(_IndexingBaseNode):
elif base_type.is_buffer and len(indices) == base_type.ndim:
# Buffer indexing
is_buffer_access = True
for index in indices:
index = index.analyse_types(env)
if not index.type.is_int:
is_buffer_access = False
if is_buffer_access:
indices = [index.analyse_types(env) for index in indices]
if all(index.type.is_int for index in indices):
replacement_node = BufferIndexNode(self.pos, indices=indices, base=self.base)
# On cloning, indices is cloned. Otherwise, unpack index into indices.
assert not isinstance(self.index, CloneNode)
......
......@@ -432,14 +432,19 @@ def create_default_resultobj(compilation_source, options):
result.main_source_file = compilation_source.source_desc.filename
result.compilation_source = compilation_source
source_desc = compilation_source.source_desc
if options.output_file:
result.c_file = os.path.join(compilation_source.cwd, options.output_file)
if options.cplus:
c_suffix = ".cpp"
else:
if options.cplus:
c_suffix = ".cpp"
c_suffix = ".c"
suggested_file_name = Utils.replace_suffix(source_desc.filename, c_suffix)
if options.output_file:
out_path = os.path.join(compilation_source.cwd, options.output_file)
if os.path.isdir(out_path):
result.c_file = os.path.join(out_path, os.path.basename(suggested_file_name))
else:
c_suffix = ".c"
result.c_file = Utils.replace_suffix(source_desc.filename, c_suffix)
result.c_file = out_path
else:
result.c_file = suggested_file_name
result.embedded_metadata = options.embedded_metadata
return result
......
......@@ -1987,7 +1987,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def create_import_star_conversion_utility_code(self, env):
# Create all conversion helpers that are needed for "import *" assignments.
# Must be done before code generation to support CythonUtilityCode.
for name, entry in env.entries.items():
for name, entry in sorted(env.entries.items()):
if entry.is_cglobal and entry.used:
if not entry.type.is_pyobject:
entry.type.create_from_py_utility_code(env)
......@@ -2016,7 +2016,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
old_error_label = code.new_error_label()
code.putln("if (0);") # so the first one can be "else if"
for name, entry in env.entries.items():
for name, entry in sorted(env.entries.items()):
if entry.is_cglobal and entry.used:
code.putln('else if (__Pyx_StrEq(name, "%s")) {' % name)
if entry.type.is_pyobject:
......@@ -2461,7 +2461,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
entry.type.global_init_code(entry, code)
def generate_wrapped_entries_code(self, env, code):
for name, entry in env.entries.items():
for name, entry in sorted(env.entries.items()):
if (entry.create_wrapper
and not entry.is_type
and entry.scope is env):
......
......@@ -5126,8 +5126,8 @@ class CascadedAssignmentNode(AssignmentNode):
# collect distinct types used on the LHS
lhs_types = set()
for lhs in self.lhs_list:
lhs.analyse_target_types(env)
for i, lhs in enumerate(self.lhs_list):
lhs = self.lhs_list[i] = lhs.analyse_target_types(env)
lhs.gil_assignment_check(env)
lhs_types.add(lhs.type)
......@@ -7832,7 +7832,7 @@ class ParallelStatNode(StatNode, ParallelNode):
def initialize_privates_to_nan(self, code, exclude=None):
first = True
for entry, (op, lastprivate) in self.privates.items():
for entry, (op, lastprivate) in sorted(self.privates.items()):
if not op and (not exclude or entry != exclude):
invalid_value = entry.type.invalid_value()
......@@ -7874,7 +7874,7 @@ class ParallelStatNode(StatNode, ParallelNode):
"""
self.modified_entries = []
for entry in self.assignments:
for entry in sorted(self.assignments):
if entry.from_closure or entry.in_closure:
self._allocate_closure_temp(code, entry)
......@@ -7899,7 +7899,7 @@ class ParallelStatNode(StatNode, ParallelNode):
self.temps = temps = code.funcstate.stop_collecting_temps()
privates, firstprivates = [], []
for temp, type in temps:
for temp, type in sorted(temps):
if type.is_pyobject or type.is_memoryviewslice:
firstprivates.append(temp)
else:
......@@ -7922,7 +7922,7 @@ class ParallelStatNode(StatNode, ParallelNode):
# Now clean up any memoryview slice and object temporaries
if self.is_parallel and not self.is_nested_prange:
code.putln("/* Clean up any temporaries */")
for temp, type in self.temps:
for temp, type in sorted(self.temps):
if type.is_memoryviewslice:
code.put_xdecref_memoryviewslice(temp, have_gil=False)
elif type.is_pyobject:
......@@ -8094,7 +8094,7 @@ class ParallelStatNode(StatNode, ParallelNode):
c = self.begin_of_parallel_control_block_point
temp_count = 0
for entry, (op, lastprivate) in self.privates.items():
for entry, (op, lastprivate) in sorted(self.privates.items()):
if not lastprivate or entry.type.is_pyobject:
continue
......@@ -8303,7 +8303,7 @@ class ParallelWithBlockNode(ParallelStatNode):
if self.privates:
privates = [e.cname for e in self.privates
if not e.type.is_pyobject]
code.put('private(%s)' % ', '.join(privates))
code.put('private(%s)' % ', '.join(sorted(privates)))
self.privatization_insertion_point = code.insertion_point()
self.put_num_threads(code)
......@@ -8623,7 +8623,7 @@ class ParallelRangeNode(ParallelStatNode):
code.putln("#ifdef _OPENMP")
code.put("#pragma omp for")
for entry, (op, lastprivate) in self.privates.items():
for entry, (op, lastprivate) in sorted(self.privates.items()):
# Don't declare the index variable as a reduction
if op and op in "+*-&^|" and entry != self.target.entry:
if entry.type.is_pyobject:
......
......@@ -636,7 +636,7 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
'operator.postincrement': ExprNodes.inc_dec_constructor(False, '++'),
'operator.postdecrement': ExprNodes.inc_dec_constructor(False, '--'),
# For backwards compatability.
# For backwards compatibility.
'address': ExprNodes.AmpersandNode,
}
......
......@@ -223,6 +223,12 @@ class Entry(object):
def all_entries(self):
return [self] + self.inner_entries
def __lt__(left, right):
    """Define a stable ordering for Entry objects (by name, then cname).

    Enables deterministic ``sorted()`` iteration over entry collections so
    generated C code is reproducible across runs.  Comparison with any
    non-Entry operand is deferred to the other operand via NotImplemented.
    """
    if not (isinstance(left, Entry) and isinstance(right, Entry)):
        return NotImplemented
    return (left.name, left.cname) < (right.name, right.cname)
class InnerEntry(Entry):
"""
......
# Present for backwards compatability
# Present for backwards compatibility
from cpython cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.bool cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.buffer cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.bytes cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.cobject cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.complex cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.dict cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.exc cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.float cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.function cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.getargs cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.instance cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.int cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.iterator cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.list cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.long cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.mapping cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.mem cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.method cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.module cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.number cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.object cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.oldbuffer cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.pycapsule cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.ref cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.sequence cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.set cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.string cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.tuple cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.type cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.unicode cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.version cimport *
# Present for backwards compatability
# Present for backwards compatibility
from cpython.weakref cimport *
# Present for backwards compatability
# Present for backwards compatibility
from libc.stdio cimport *
# Present for backwards compatability
# Present for backwards compatibility
from libc.stdlib cimport *
......@@ -5,7 +5,7 @@ cdef extern from "Python.h":
cdef enum:
PyBUF_SIMPLE,
PyBUF_WRITABLE,
PyBUF_WRITEABLE, # backwards compatability
PyBUF_WRITEABLE, # backwards compatibility
PyBUF_FORMAT,
PyBUF_ND,
PyBUF_STRIDES,
......
......@@ -3,7 +3,7 @@
# If any of the PyArray_* functions are called, import_array must be
# called first.
#
# This also defines backwards-compatability buffer acquisition
# This also defines backwards-compatibility buffer acquisition
# code for use in Python 2.x (or Python <= 2.5 when NumPy starts
# implementing PEP-3118 directly).
#
......
# Note that the actual size of these types is system-dependant, and
# Note that the actual size of these types is system-dependent, and
# can't be detected at C compile time. However, the generated C code
# will correctly use the actual size of these types *except* for
# determining promotion in binary arithmetic expressions involving
......
......@@ -753,11 +753,11 @@ __pyx_memviewslice_index_full(const char *bufp, Py_ssize_t idx,
/////////////// MemviewDtypeToObject.proto ///////////////
{{if to_py_function}}
static PyObject *{{get_function}}(const char *itemp); /* proto */
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp); /* proto */
{{endif}}
{{if from_py_function}}
static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
{{endif}}
/////////////// MemviewDtypeToObject ///////////////
......@@ -767,13 +767,13 @@ static int {{set_function}}(const char *itemp, PyObject *obj); /* proto */
/* Convert a dtype to or from a Python object */
{{if to_py_function}}
static PyObject *{{get_function}}(const char *itemp) {
static CYTHON_INLINE PyObject *{{get_function}}(const char *itemp) {
return (PyObject *) {{to_py_function}}(*({{dtype}} *) itemp);
}
{{endif}}
{{if from_py_function}}
static int {{set_function}}(const char *itemp, PyObject *obj) {
static CYTHON_INLINE int {{set_function}}(const char *itemp, PyObject *obj) {
{{dtype}} value = {{from_py_function}}(obj);
if ({{error_condition}})
return 0;
......
......@@ -483,6 +483,63 @@ def set_int_2d(object[int, ndim=2] buf, int i, int j, int value):
"""
buf[i, j] = value
@testcase
def set_int_2d_cascaded(object[int, ndim=2] buf, int i, int j, int value):
    """
    Uses get_int_2d to read back the value afterwards. For pure
    unit test, one should support reading in MockBuffer instead.
    >>> C = IntMockBuffer("C", range(6), (2,3))
    >>> set_int_2d_cascaded(C, 1, 1, 10)
    acquired C
    released C
    10
    >>> get_int_2d(C, 1, 1)
    acquired C
    released C
    10
    Check negative indexing:
    >>> set_int_2d_cascaded(C, -1, 0, 3)
    acquired C
    released C
    3
    >>> get_int_2d(C, -1, 0)
    acquired C
    released C
    3
    >>> set_int_2d_cascaded(C, -1, -2, 8)
    acquired C
    released C
    8
    >>> get_int_2d(C, -1, -2)
    acquired C
    released C
    8
    >>> set_int_2d_cascaded(C, -2, -3, 9)
    acquired C
    released C
    9
    >>> get_int_2d(C, -2, -3)
    acquired C
    released C
    9
    Out-of-bounds errors:
    >>> set_int_2d_cascaded(C, 2, 0, 19)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 0)
    >>> set_int_2d_cascaded(C, 0, -4, 19)
    Traceback (most recent call last):
    IndexError: Out of bounds on buffer access (axis 1)
    """
    # Exercises cascaded assignment where the leftmost target is a 2-D
    # buffer index and the middle target is a plain C int — both must
    # receive `value`, and the buffer index must still be bounds-checked.
    cdef int casc_value
    buf[i, j] = casc_value = value
    # Returning the C temp lets the doctest confirm the middle target of
    # the cascade was assigned, independently of the buffer write.
    return casc_value
@testcase
def list_comprehension(object[int] buf, len):
"""
......@@ -1184,3 +1241,14 @@ def test_inplace_assignment():
buf[0] = get_int()
print buf[0]
@testcase
def test_nested_assignment():
    """
    >>> test_nested_assignment()
    100
    """
    # Regression test: the index expression on the assignment's LHS is
    # itself a buffer access (inner[0]), so buffer-index analysis must
    # handle a nested buffer read while compiling the outer buffer write.
    cdef object[int] inner = IntMockBuffer(None, [1, 2, 3])
    cdef object[int] outer = IntMockBuffer(None, [1, 2, 3])
    # inner[0] == 1, so this writes 100 into outer[1] and reads it back.
    outer[inner[0]] = 100
    return outer[inner[0]]
......@@ -17,7 +17,8 @@ available_flags = (
cdef class MockBuffer:
cdef object format, offset
cdef void* buffer
cdef int len, itemsize, ndim
cdef Py_ssize_t len, itemsize
cdef int ndim
cdef Py_ssize_t* strides
cdef Py_ssize_t* shape
cdef Py_ssize_t* suboffsets
......@@ -55,7 +56,7 @@ cdef class MockBuffer:
else: break
if len(datashape) > 1:
# indirect access
self.ndim = len(datashape)
self.ndim = <int>len(datashape)
shape = datashape
self.buffer = self.create_indirect_buffer(data, shape)
suboffsets = [0] * (self.ndim-1) + [-1]
......@@ -64,7 +65,7 @@ cdef class MockBuffer:
else:
# strided and/or simple access
self.buffer = self.create_buffer(data)
self.ndim = len(shape)
self.ndim = <int>len(shape)
self.suboffsets = NULL
try:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment