in wxGetFontEncFromCharSet(): map only DEFAULT_CHARSET to wxFONTENCODING_SYSTEM (but not all the others); also handle SYMBOL_CHARSET separately
git-svn-id: https://svn.wxwidgets.org/svn/wx/wxWidgets/trunk@26705 c3d73ce0-8a6f-49c7-b76d-6d57e0e08775
@@ -190,7 +190,10 @@ wxFontEncoding wxGetFontEncFromCharSet(int cs)
     switch ( cs )
     {
         default:
-            // assume the system charset
+            wxFAIL_MSG( _T("unexpected Win32 charset") );
+            // fall through and assume the system charset
+
+        case DEFAULT_CHARSET:
             fontEncoding = wxFONTENCODING_SYSTEM;
             break;
 
@@ -198,6 +201,11 @@ wxFontEncoding wxGetFontEncFromCharSet(int cs)
             fontEncoding = wxFONTENCODING_CP1252;
             break;
 
+        case SYMBOL_CHARSET:
+            // what can we do here?
+            fontEncoding = wxFONTENCODING_MAX;
+            break;
+
 #if defined(__WIN32__) && !defined(__WXMICROWIN__)
         case EASTEUROPE_CHARSET:
             fontEncoding = wxFONTENCODING_CP1250;
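
Below is a minimal sketch (not part of the commit) of what the changed mapping yields for the charsets touched here. The extern declaration, the console harness and the chosen headers are assumptions made for illustration: wxGetFontEncFromCharSet() is an internal MSW helper normally declared in a private wxWidgets header, and only the DEFAULT_CHARSET, SYMBOL_CHARSET and fall-through behaviour shown in the diff above are taken as given.

// Sketch only: exercises the mappings visible in the diff above.
#include <wx/defs.h>
#include <wx/fontenc.h>
#include <windows.h>   // DEFAULT_CHARSET, SYMBOL_CHARSET, ...
#include <cstdio>

// The helper changed by this commit (normally declared in an internal header).
extern wxFontEncoding wxGetFontEncFromCharSet(int cs);

int main()
{
    // DEFAULT_CHARSET is now mapped explicitly instead of only via "default:".
    std::printf("DEFAULT_CHARSET -> %d (expect wxFONTENCODING_SYSTEM = %d)\n",
                (int)wxGetFontEncFromCharSet(DEFAULT_CHARSET),
                (int)wxFONTENCODING_SYSTEM);

    // SYMBOL_CHARSET has no usable text encoding, so it now yields
    // wxFONTENCODING_MAX instead of silently claiming the system encoding.
    std::printf("SYMBOL_CHARSET  -> %d (expect wxFONTENCODING_MAX = %d)\n",
                (int)wxGetFontEncFromCharSet(SYMBOL_CHARSET),
                (int)wxFONTENCODING_MAX);

    // Any charset the switch does not recognize asserts in debug builds
    // (wxFAIL_MSG) and then falls through to wxFONTENCODING_SYSTEM.
    return 0;
}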