Use wxChar16 instead of wxDecodeSurrogate_t for UTF-16 unit type in wxDecodeSurrogate
@@ -122,18 +122,12 @@ static size_t decode_utf16(const wxUint16* input, wxUint32& output)
     }
 }
 
-#ifdef WC_UTF16
-    typedef wchar_t wxDecodeSurrogate_t;
-#else // !WC_UTF16
-    typedef wxUint16 wxDecodeSurrogate_t;
-#endif // WC_UTF16/!WC_UTF16
-
 // returns the next UTF-32 character from the wchar_t buffer and advances the
 // pointer to the character after this one
 //
 // if an invalid character is found, *pSrc is set to NULL, the caller must
 // check for this
-static wxUint32 wxDecodeSurrogate(const wxDecodeSurrogate_t **pSrc)
+static wxUint32 wxDecodeSurrogate(const wxChar16 **pSrc)
 {
     wxUint32 out;
     const size_t
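
The removed wxDecodeSurrogate_t typedef selected either wchar_t or wxUint16 depending on WC_UTF16; wxChar16 is wxWidgets' 16-bit character unit type, which appears to encode the same distinction centrally and makes the local typedef redundant. For context, below is a minimal standalone sketch of the UTF-16 surrogate decoding that decode_utf16() and wxDecodeSurrogate() perform. It uses standard char16_t/char32_t rather than the wxWidgets types, and the function name decode_one_utf16 is illustrative, not part of wxWidgets (the real helpers report errors via wxCONV_FAILED and a NULL *pSrc rather than a zero return).

#include <cstddef>

// Decode one code point from a UTF-16 sequence.
// Returns the number of 16-bit units consumed (1 or 2), or 0 on error.
static std::size_t decode_one_utf16(const char16_t* input, char32_t& output)
{
    const char16_t lead = input[0];

    if (lead < 0xD800 || lead > 0xDFFF)
    {
        // Not a surrogate: the unit is the code point itself.
        output = lead;
        return 1;
    }

    if (lead > 0xDBFF)
    {
        // A trail surrogate without a preceding lead surrogate is invalid.
        return 0;
    }

    const char16_t trail = input[1];
    if (trail < 0xDC00 || trail > 0xDFFF)
    {
        // A lead surrogate must be followed by a trail surrogate.
        return 0;
    }

    // Combine the pair: each surrogate contributes 10 bits, offset by 0x10000.
    output = 0x10000 + ((char32_t(lead) - 0xD800) << 10)
                     + (char32_t(trail) - 0xDC00);
    return 2;
}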