Commit e2713bec authored by Martin v. Löwis

Build with --disable-unicode again. Fixes #1158607.

Will backport to 2.4.
parent b60ae996
@@ -720,11 +720,19 @@ def make_encoding_map(decoding_map):
 
 ### error handlers
 
-strict_errors = lookup_error("strict")
-ignore_errors = lookup_error("ignore")
-replace_errors = lookup_error("replace")
-xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
-backslashreplace_errors = lookup_error("backslashreplace")
+try:
+    strict_errors = lookup_error("strict")
+    ignore_errors = lookup_error("ignore")
+    replace_errors = lookup_error("replace")
+    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
+    backslashreplace_errors = lookup_error("backslashreplace")
+except LookupError:
+    # In --disable-unicode builds, these error handlers are missing
+    strict_errors = None
+    ignore_errors = None
+    replace_errors = None
+    xmlcharrefreplace_errors = None
+    backslashreplace_errors = None
 
 # Tell modulefinder that using codecs probably needs the encodings
 # package
......
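The try/except added above relies on codecs.lookup_error() raising LookupError for any handler name that has not been registered, which is exactly what happens on a --disable-unicode build where the standard handlers never get set up. A minimal sketch of that behaviour on a normal unicode-enabled interpreter (the handler name "demo_replace" and its function are made up for illustration):

    import codecs

    def demo_replace(exc):
        # Made-up handler: substitute u"?" for the failing span and resume
        # right after it.
        return (u"?", exc.end)

    codecs.register_error("demo_replace", demo_replace)
    assert codecs.lookup_error("demo_replace") is demo_replace

    try:
        codecs.lookup_error("no-such-handler")
    except LookupError:
        # The same failure mode the new try/except in codecs.py guards against.
        pass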
@@ -202,12 +202,12 @@ d[float] = _deepcopy_atomic
 d[bool] = _deepcopy_atomic
 try:
     d[complex] = _deepcopy_atomic
-except AttributeError:
+except NameError:
     pass
 d[str] = _deepcopy_atomic
 try:
     d[unicode] = _deepcopy_atomic
-except AttributeError:
+except NameError:
     pass
 try:
     d[types.CodeType] = _deepcopy_atomic
......
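Switching from AttributeError to NameError matters because copy.py refers to the optional types by their builtin names: on a build that lacks them the name itself is undefined, so the failed lookup raises NameError, while AttributeError only covers the types-module spelling of the same check. A small sketch of the two failure modes (the variable names here are illustrative):

    import types

    # A bare name lookup on a missing builtin raises NameError ...
    try:
        unicode
        have_unicode = True
    except NameError:
        have_unicode = False

    # ... whereas the equivalent attribute lookup on the types module
    # raises AttributeError, which is what the old except clause matched.
    try:
        types.UnicodeType
        have_unicode_type = True
    except AttributeError:
        have_unicode_type = False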
@@ -144,7 +144,7 @@ else:
         TESTFN_UNICODE_UNENCODEABLE = None
     else:
         # Japanese characters (I think - from bug 846133)
-        TESTFN_UNICODE_UNENCODEABLE = u"@test-\u5171\u6709\u3055\u308c\u308b"
+        TESTFN_UNICODE_UNENCODEABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
         try:
             # XXX - Note - should be using TESTFN_ENCODING here - but for
             # Windows, "mbcs" currently always operates as if in
......
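The eval() wrapper above exists because a u"..." literal is rejected by the compiler itself on a --disable-unicode build, so leaving it inline would stop the whole module from byte-compiling even though this branch is never executed there. Hiding the literal inside an ordinary string defers the problem to run time, where the surrounding checks ensure it is only evaluated when unicode support is present. A tiny sketch of the idea (SAMPLE is a made-up name):

    # Written inline, this would be a compile-time error on --disable-unicode
    # builds and the module containing it could not be imported at all:
    #     SAMPLE = u"@test-\u00e9"
    # Deferred through eval(), the module always compiles, and the literal is
    # only built on interpreters that actually have a unicode type:
    SAMPLE = eval('u"@test-\u00e9"')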
@@ -221,6 +221,8 @@ Library
 
 Build
 -----
 
+- Bug #1158607: Build with --disable-unicode again.
+
 - spwdmodule.c is built only if either HAVE_GETSPNAM or HAVE_GETSPENT is
   defined. Discovered as a result of not being able to build on OS X.
......
@@ -104,8 +104,15 @@ codec_encode(PyObject *self, PyObject *args)
     if (!PyArg_ParseTuple(args, "O|ss:encode", &v, &encoding, &errors))
         return NULL;
 
+#ifdef Py_USING_UNICODE
     if (encoding == NULL)
         encoding = PyUnicode_GetDefaultEncoding();
+#else
+    if (encoding == NULL) {
+        PyErr_SetString(PyExc_ValueError, "no encoding specified");
+        return NULL;
+    }
+#endif
 
     /* Encode via the codec registry */
     v = PyCodec_Encode(v, encoding, errors);
@@ -137,8 +144,15 @@ codec_decode(PyObject *self, PyObject *args)
     if (!PyArg_ParseTuple(args, "O|ss:decode", &v, &encoding, &errors))
         return NULL;
 
+#ifdef Py_USING_UNICODE
     if (encoding == NULL)
         encoding = PyUnicode_GetDefaultEncoding();
+#else
+    if (encoding == NULL) {
+        PyErr_SetString(PyExc_ValueError, "no encoding specified");
+        return NULL;
+    }
+#endif
 
     /* Decode via the codec registry */
     v = PyCodec_Decode(v, encoding, errors);
......
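Seen from Python, the #else branches above mean that on a --disable-unicode build _codecs.encode() and _codecs.decode() no longer assume a default encoding: a call that omits the encoding argument gets a clean ValueError instead of reaching PyUnicode_GetDefaultEncoding(), which does not exist in such builds. A rough sketch of the two call shapes, shown on a normal build:

    import _codecs

    # Explicit encoding: behaves the same on every build.
    data = _codecs.encode("abc", "ascii")

    # Encoding omitted: falls back to the default encoding on a unicode
    # build; on a --disable-unicode build, where there is no default, this
    # call now raises ValueError("no encoding specified").
    data = _codecs.encode("abc")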
@@ -838,8 +838,10 @@ static PyGetSetDef PyTclObject_getsetlist[] = {
 };
 
 static PyMethodDef PyTclObject_methods[] = {
+#ifdef Py_USING_UNICODE
     {"__unicode__", (PyCFunction)PyTclObject_unicode, METH_NOARGS,
      PyTclObject_unicode__doc__},
+#endif
     {0}
 };
 
@@ -991,7 +993,7 @@ FromObj(PyObject* tkapp, Tcl_Obj *value)
         }
     }
 #else
-    res = PyString_FromStringAndSize(value->bytes, value->length);
+    result = PyString_FromStringAndSize(value->bytes, value->length);
 #endif
     return result;
 }
......
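For context on the first hunk: the __unicode__ entry in the method table only exists to support unicode(tclobj), so compiling it out on a non-unicode build loses nothing, and it could not work there anyway. A tiny illustration of the protocol on a unicode-enabled interpreter (the Demo class is made up):

    class Demo:
        def __unicode__(self):
            # unicode(obj) looks this method up when it is defined.
            return u"demo"

    assert unicode(Demo()) == u"demo"

The second hunk is a plain compile fix: the #else branch stored into an undeclared name res, while the function declares and returns result.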
@@ -799,11 +799,12 @@ class PyBuildExt(build_ext):
             ))
 
         # Hye-Shik Chang's CJKCodecs modules.
-        exts.append(Extension('_multibytecodec',
-                              ['cjkcodecs/multibytecodec.c']))
-        for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-            exts.append(Extension('_codecs_' + loc,
-                                  ['cjkcodecs/_codecs_%s.c' % loc]))
+        if have_unicode:
+            exts.append(Extension('_multibytecodec',
+                                  ['cjkcodecs/multibytecodec.c']))
+            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
+                exts.append(Extension('_codecs_' + loc,
+                                      ['cjkcodecs/_codecs_%s.c' % loc]))
 
         # Dynamic loading module
         if sys.maxint == 0x7fffffff:
......
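One consequence of the new have_unicode guard is that the CJK codec extension modules are simply never compiled on a --disable-unicode build, so their absence can be detected at run time with an ordinary import. A small check along those lines (purely illustrative):

    try:
        import _multibytecodec
    except ImportError:
        # Not compiled in: typically an interpreter configured with
        # --disable-unicode (or one where the CJK codecs were left out).
        _multibytecodec = None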