extracted the wxStringTokenizer tests into their own file and rewrote them to be table-based and clearer (the strtok()-compatibility test is now separate from the others)

git-svn-id: https://svn.wxwidgets.org/svn/wx/wxWidgets/trunk@36505 c3d73ce0-8a6f-49c7-b76d-6d57e0e08775
Vadim Zeitlin
2005-12-20 16:30:12 +00:00
parent 80ca48ebc6
commit a67f601b14
3 changed files with 175 additions and 99 deletions
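
For orientation before the diff: the new test table exercises several wxStringTokenizer modes whose expected token counts differ only in how empty tokens are treated. The following standalone sketch is not part of the commit (plain main(), hypothetical example string "a::b:"); it only uses the public wxStringTokenizer API to show the mode semantics the table relies on:

#include "wx/wx.h"
#include "wx/tokenzr.h"

int main()
{
    // "a::b:" split on ':' in the three explicit modes:
    //   wxTOKEN_STRTOK        -> "a", "b"           (2 tokens, empty tokens skipped)
    //   wxTOKEN_RET_EMPTY     -> "a", "", "b"       (3 tokens, middle empties kept)
    //   wxTOKEN_RET_EMPTY_ALL -> "a", "", "b", ""   (4 tokens, trailing empties too)
    static const wxStringTokenizerMode modes[] =
    {
        wxTOKEN_STRTOK, wxTOKEN_RET_EMPTY, wxTOKEN_RET_EMPTY_ALL
    };

    for ( size_t n = 0; n < WXSIZEOF(modes); n++ )
    {
        wxStringTokenizer tkz(_T("a::b:"), _T(":"), modes[n]);
        wxPrintf(_T("%lu tokens\n"), (unsigned long)tkz.CountTokens());
    }

    return 0;
}

With a non-whitespace delimiter such as ':', wxTOKEN_DEFAULT behaves like wxTOKEN_RET_EMPTY; with whitespace-only delimiters it behaves like wxTOKEN_STRTOK, which is why the whitespace-delimited rows in the new table expect 4, 6 and 9 tokens for the same input depending on the mode.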

tests/strings/strings.cpp

@@ -21,8 +21,6 @@
#include "wx/wx.h"
#endif // WX_PRECOMP
#include "wx/tokenzr.h"
// ----------------------------------------------------------------------------
// test class
// ----------------------------------------------------------------------------
@@ -46,8 +44,6 @@ private:
#endif // wxUSE_WCHAR_T
CPPUNIT_TEST( Extraction );
CPPUNIT_TEST( Find );
CPPUNIT_TEST( Tokenizer );
CPPUNIT_TEST( TokenizerGetPosition );
CPPUNIT_TEST( Replace );
CPPUNIT_TEST( Match );
CPPUNIT_TEST( CaseChanges );
@@ -70,9 +66,6 @@ private:
#endif // wxUSE_WCHAR_T
void Extraction();
void Find();
void SingleTokenizerTest( wxChar *str, wxChar *delims, size_t count , wxStringTokenizerMode mode );
void Tokenizer();
void TokenizerGetPosition();
void Replace();
void Match();
void CaseChanges();
@@ -380,98 +373,6 @@ void StringTestCase::Find()
#undef TEST_FIND
}
void StringTestCase::SingleTokenizerTest( wxChar *str, wxChar *delims, size_t count , wxStringTokenizerMode mode )
{
wxStringTokenizer tkz( str, delims, mode);
CPPUNIT_ASSERT( tkz.CountTokens() == count );
wxChar *buf, *s = NULL, *last;
if ( tkz.GetMode() == wxTOKEN_STRTOK )
{
buf = new wxChar[wxStrlen(str) + 1];
wxStrcpy(buf, str);
s = wxStrtok(buf, delims, &last);
}
else
{
buf = NULL;
}
size_t count2 = 0;
while ( tkz.HasMoreTokens() )
{
wxString token = tkz.GetNextToken();
if ( buf )
{
CPPUNIT_ASSERT( token == s );
s = wxStrtok(NULL, delims, &last);
}
count2++;
}
CPPUNIT_ASSERT( count2 == count );
if ( buf )
{
delete [] buf;
}
}
void StringTestCase::Tokenizer()
{
SingleTokenizerTest( _T(""), _T(" "), 0, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello, world"), _T(" "), 2, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello, world "), _T(" "), 2, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello, world"), _T(","), 2, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello, world!"), _T(",!"), 2, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello,, world!"), _T(",!"), 3, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("Hello, world!"), _T(",!"), 3, wxTOKEN_RET_EMPTY_ALL );
SingleTokenizerTest( _T("username:password:uid:gid:gecos:home:shell"), _T(":"), 7, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, 4, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, 6, wxTOKEN_RET_EMPTY );
SingleTokenizerTest( _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, 9, wxTOKEN_RET_EMPTY_ALL );
SingleTokenizerTest( _T("01/02/99"), _T("/-"), 3, wxTOKEN_DEFAULT );
SingleTokenizerTest( _T("01-02/99"), _T("/-"), 3, wxTOKEN_RET_DELIMS );
}
// call this with the string to tokenize, delimiters to use and the expected
// positions (i.e. results of GetPosition()) after each GetNextToken() call,
// terminate positions with 0
static void DoTokenizerGetPosition(const wxChar *s,
const wxChar *delims, int pos, ...)
{
wxStringTokenizer tkz(s, delims);
CPPUNIT_ASSERT( tkz.GetPosition() == 0 );
va_list ap;
va_start(ap, pos);
for ( ;; )
{
if ( !pos )
{
CPPUNIT_ASSERT( !tkz.HasMoreTokens() );
break;
}
tkz.GetNextToken();
CPPUNIT_ASSERT( tkz.GetPosition() == (size_t)pos );
pos = va_arg(ap, int);
}
va_end(ap);
}
void StringTestCase::TokenizerGetPosition()
{
DoTokenizerGetPosition(_T("foo"), _T("_"), 3, 0);
DoTokenizerGetPosition(_T("foo_bar"), _T("_"), 4, 7, 0);
DoTokenizerGetPosition(_T("foo_bar_"), _T("_"), 4, 8, 0);
}
void StringTestCase::Replace()
{
#define TEST_REPLACE( original , pos , len , replacement , result ) \

tests/strings/tokenizer.cpp (new file, 174 lines)

@@ -0,0 +1,174 @@
///////////////////////////////////////////////////////////////////////////////
// Name: tests/strings/tokenizer.cpp
// Purpose: wxStringTokenizer unit test
// Author: Vadim Zeitlin
// Created: 2005-12-20 (extracted from strings.cpp)
// RCS-ID: $Id$
// Copyright: (c) 2004-2005 Vadim Zeitlin
///////////////////////////////////////////////////////////////////////////////
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
#include "testprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#ifndef WX_PRECOMP
#include "wx/wx.h"
#endif // WX_PRECOMP
#include "wx/tokenzr.h"
// ----------------------------------------------------------------------------
// test class
// ----------------------------------------------------------------------------
class TokenizerTestCase : public CppUnit::TestCase
{
public:
TokenizerTestCase() { }
private:
CPPUNIT_TEST_SUITE( TokenizerTestCase );
CPPUNIT_TEST( GetCount );
CPPUNIT_TEST( GetPosition );
CPPUNIT_TEST( StrtokCompat );
CPPUNIT_TEST_SUITE_END();
void GetCount();
void GetPosition();
void StrtokCompat();
DECLARE_NO_COPY_CLASS(TokenizerTestCase)
};
// register in the unnamed registry so that these tests are run by default
CPPUNIT_TEST_SUITE_REGISTRATION( TokenizerTestCase );
// also include in its own registry so that these tests can be run alone
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION( TokenizerTestCase, "TokenizerTestCase" );
// ----------------------------------------------------------------------------
// test data
// ----------------------------------------------------------------------------
static const struct TokenizerTestData
{
// the string to tokenize
const wxChar *str;
// the delimiters to use
const wxChar *delims;
// the tokenizer mode
wxStringTokenizerMode mode;
// expected number of tokens
size_t count;
}
gs_testData[] =
{
{ _T(""), _T(" "), wxTOKEN_DEFAULT , 0 },
{ _T("Hello, world"), _T(" "), wxTOKEN_DEFAULT , 2 },
{ _T("Hello, world "), _T(" "), wxTOKEN_DEFAULT , 2 },
{ _T("Hello, world"), _T(","), wxTOKEN_DEFAULT , 2 },
{ _T("Hello, world!"), _T(",!"), wxTOKEN_DEFAULT , 2 },
{ _T("Hello,, world!"), _T(",!"), wxTOKEN_DEFAULT , 3 },
{ _T("Hello, world!"), _T(",!"), wxTOKEN_RET_EMPTY_ALL, 3 },
{ _T("username:password:uid:gid:gecos:home:shell"),
_T(":"), wxTOKEN_DEFAULT , 7 },
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_DEFAULT , 4 },
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY , 6 },
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY_ALL, 9 },
{ _T("01/02/99"), _T("/-"), wxTOKEN_DEFAULT , 3 },
{ _T("01-02/99"), _T("/-"), wxTOKEN_RET_DELIMS , 3 },
};
// ----------------------------------------------------------------------------
// the tests
// ----------------------------------------------------------------------------
void TokenizerTestCase::GetCount()
{
for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
{
const TokenizerTestData& ttd = gs_testData[n];
wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
CPPUNIT_ASSERT( tkz.CountTokens() == ttd.count );
size_t count = 0;
while ( tkz.HasMoreTokens() )
{
tkz.GetNextToken();
count++;
}
CPPUNIT_ASSERT_EQUAL( ttd.count, count );
}
}
// call this with the string to tokenize, delimiters to use and the expected
// positions (i.e. results of GetPosition()) after each GetNextToken() call,
// terminate positions with 0
static void
DoTestGetPosition(const wxChar *s, const wxChar *delims, int pos, ...)
{
wxStringTokenizer tkz(s, delims);
CPPUNIT_ASSERT_EQUAL( (size_t)0, tkz.GetPosition() );
va_list ap;
va_start(ap, pos);
for ( ;; )
{
if ( !pos )
{
CPPUNIT_ASSERT( !tkz.HasMoreTokens() );
break;
}
tkz.GetNextToken();
CPPUNIT_ASSERT_EQUAL( (size_t)pos, tkz.GetPosition() );
pos = va_arg(ap, int);
}
va_end(ap);
}
void TokenizerTestCase::GetPosition()
{
DoTestGetPosition(_T("foo"), _T("_"), 3, 0);
DoTestGetPosition(_T("foo_bar"), _T("_"), 4, 7, 0);
DoTestGetPosition(_T("foo_bar_"), _T("_"), 4, 8, 0);
}
void TokenizerTestCase::StrtokCompat()
{
for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
{
const TokenizerTestData& ttd = gs_testData[n];
if ( ttd.mode != wxTOKEN_STRTOK )
continue;
wxCharBuffer buf(ttd.str);
wxChar *last;
wxChar *s = wxStrtok(buf.data(), ttd.delims, &last);
wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
while ( tkz.HasMoreTokens() )
{
CPPUNIT_ASSERT_EQUAL( wxString(s), tkz.GetNextToken() );
s = wxStrtok(NULL, ttd.delims, &last);
}
}
}
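
The suite above is registered twice: in the unnamed registry, so it runs with the rest of the tests by default, and under the "TokenizerTestCase" name so it can be run on its own. As a minimal sketch of what the named registration allows (a bare CppUnit text runner, assuming tokenizer.cpp is linked into the same binary; this is an illustration, not the actual wx test harness):

#include <cppunit/extensions/TestFactoryRegistry.h>
#include <cppunit/ui/text/TestRunner.h>

int main()
{
    CppUnit::TextUi::TestRunner runner;

    // pull in only the tests registered under the "TokenizerTestCase" name
    runner.addTest(
        CppUnit::TestFactoryRegistry::getRegistry("TokenizerTestCase").makeTest());

    // run() returns true if and only if all tests passed
    return runner.run() ? 0 : 1;
}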


@@ -43,6 +43,7 @@
scopeguard/scopeguardtest.cpp
strings/strings.cpp
strings/stdstrings.cpp
strings/tokenizer.cpp
strings/unicode.cpp
strings/crt.cpp
streams/bstream.cpp