extracted wxStringTokenizer tests into their own file and rewrote them to be table-based and clearer (separated the strtok()-compatibility test from the other one)
git-svn-id: https://svn.wxwidgets.org/svn/wx/wxWidgets/trunk@36505 c3d73ce0-8a6f-49c7-b76d-6d57e0e08775
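For readers unfamiliar with the approach, here is a minimal, self-contained sketch of the table-driven pattern the new file uses. Plain strtok() stands in for wxStringTokenizer, and the Case/cases names are made up for this illustration only; they do not appear in the commit. Every test is one row in a static array, and a single loop applies the same checks to each row:

#include <assert.h>
#include <string.h>

struct Case
{
    const char *str;        // the string to tokenize
    const char *delims;     // the delimiters to use
    size_t expectedCount;   // how many tokens we expect back
};

static const Case cases[] =
{
    { "Hello, world", " ", 2 },
    { "a:b:c",        ":", 3 },
};

int main()
{
    for ( size_t n = 0; n < sizeof(cases)/sizeof(cases[0]); n++ )
    {
        // strtok() modifies its input, so tokenize a writable copy
        char buf[64];
        strncpy(buf, cases[n].str, sizeof(buf) - 1);
        buf[sizeof(buf) - 1] = '\0';

        size_t count = 0;
        for ( char *tok = strtok(buf, cases[n].delims);
              tok != NULL;
              tok = strtok(NULL, cases[n].delims) )
        {
            count++;
        }

        assert(count == cases[n].expectedCount);
    }

    return 0;
}

Adding a new case then means appending one row to the table rather than writing another test function, which is the point of the rewrite described above.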
174
tests/strings/tokenizer.cpp
Normal file
@@ -0,0 +1,174 @@
///////////////////////////////////////////////////////////////////////////////
// Name:        tests/strings/tokenizer.cpp
// Purpose:     wxStringTokenizer unit test
// Author:      Vadim Zeitlin
// Created:     2005-12-20 (extracted from strings.cpp)
// RCS-ID:      $Id$
// Copyright:   (c) 2004-2005 Vadim Zeitlin
///////////////////////////////////////////////////////////////////////////////

// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------

#include "testprec.h"

#ifdef __BORLANDC__
    #pragma hdrstop
#endif

#ifndef WX_PRECOMP
    #include "wx/wx.h"
#endif // WX_PRECOMP

#include "wx/tokenzr.h"

// ----------------------------------------------------------------------------
// test class
// ----------------------------------------------------------------------------

class TokenizerTestCase : public CppUnit::TestCase
{
public:
    TokenizerTestCase() { }

private:
    CPPUNIT_TEST_SUITE( TokenizerTestCase );
        CPPUNIT_TEST( GetCount );
        CPPUNIT_TEST( GetPosition );
        CPPUNIT_TEST( StrtokCompat );
    CPPUNIT_TEST_SUITE_END();

    void GetCount();
    void GetPosition();
    void StrtokCompat();

    DECLARE_NO_COPY_CLASS(TokenizerTestCase)
};

// register in the unnamed registry so that these tests are run by default
CPPUNIT_TEST_SUITE_REGISTRATION( TokenizerTestCase );

// also include in its own registry so that these tests can be run alone
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION( TokenizerTestCase, "TokenizerTestCase" );

// ----------------------------------------------------------------------------
// test data
// ----------------------------------------------------------------------------

static const struct TokenizerTestData
{
    // the string to tokenize
    const wxChar *str;

    // the delimiters to use
    const wxChar *delims;

    // the tokenizer mode
    wxStringTokenizerMode mode;

    // expected number of tokens
    size_t count;
}
gs_testData[] =
{
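    // note: wxTOKEN_DEFAULT behaves as wxTOKEN_STRTOK when the delimiters are
    // all whitespace and as wxTOKEN_RET_EMPTY otherwise, which is what the
    // expected counts below rely on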
{ _T(""), _T(" "), wxTOKEN_DEFAULT , 0 },
|
||||
{ _T("Hello, world"), _T(" "), wxTOKEN_DEFAULT , 2 },
|
||||
{ _T("Hello, world "), _T(" "), wxTOKEN_DEFAULT , 2 },
|
||||
{ _T("Hello, world"), _T(","), wxTOKEN_DEFAULT , 2 },
|
||||
{ _T("Hello, world!"), _T(",!"), wxTOKEN_DEFAULT , 2 },
|
||||
{ _T("Hello,, world!"), _T(",!"), wxTOKEN_DEFAULT , 3 },
|
||||
{ _T("Hello, world!"), _T(",!"), wxTOKEN_RET_EMPTY_ALL, 3 },
|
||||
{ _T("username:password:uid:gid:gecos:home:shell"),
|
||||
_T(":"), wxTOKEN_DEFAULT , 7 },
|
||||
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_DEFAULT , 4 },
|
||||
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY , 6 },
|
||||
{ _T("1 \t3\t4 6 "), wxDEFAULT_DELIMITERS, wxTOKEN_RET_EMPTY_ALL, 9 },
|
||||
{ _T("01/02/99"), _T("/-"), wxTOKEN_DEFAULT , 3 },
|
||||
{ _T("01-02/99"), _T("/-"), wxTOKEN_RET_DELIMS , 3 },
|
||||
};
|
||||
|
||||
// ----------------------------------------------------------------------------
// the tests
// ----------------------------------------------------------------------------

void TokenizerTestCase::GetCount()
{
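    // for each test case check that CountTokens() returns the expected number
    // of tokens and that iterating with GetNextToken() yields just as many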
    for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
    {
        const TokenizerTestData& ttd = gs_testData[n];

        wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
        CPPUNIT_ASSERT( tkz.CountTokens() == ttd.count );

        size_t count = 0;
        while ( tkz.HasMoreTokens() )
        {
            tkz.GetNextToken();
            count++;
        }

        CPPUNIT_ASSERT_EQUAL( ttd.count, count );
    }
}

// call this with the string to tokenize, the delimiters to use and the
// expected positions (i.e. the results of GetPosition()) after each
// GetNextToken() call; terminate the list of positions with 0
static void
DoTestGetPosition(const wxChar *s, const wxChar *delims, int pos, ...)
{
    wxStringTokenizer tkz(s, delims);

    CPPUNIT_ASSERT_EQUAL( (size_t)0, tkz.GetPosition() );

    va_list ap;
    va_start(ap, pos);

    for ( ;; )
    {
        if ( !pos )
        {
            CPPUNIT_ASSERT( !tkz.HasMoreTokens() );
            break;
        }

        tkz.GetNextToken();

        CPPUNIT_ASSERT_EQUAL( (size_t)pos, tkz.GetPosition() );

        pos = va_arg(ap, int);
    }

    va_end(ap);
}

void TokenizerTestCase::GetPosition()
{
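    // the expected positions are the indices at which scanning resumes after
    // each token, i.e. just past the delimiter which terminated it, or the
    // string length for the last token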
DoTestGetPosition(_T("foo"), _T("_"), 3, 0);
|
||||
DoTestGetPosition(_T("foo_bar"), _T("_"), 4, 7, 0);
|
||||
DoTestGetPosition(_T("foo_bar_"), _T("_"), 4, 8, 0);
|
||||
}
|
||||
|
||||
void TokenizerTestCase::StrtokCompat()
{
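    // check that wxStringTokenizer in wxTOKEN_STRTOK mode returns exactly the
    // same tokens as the standard wxStrtok() function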
    for ( size_t n = 0; n < WXSIZEOF(gs_testData); n++ )
    {
        const TokenizerTestData& ttd = gs_testData[n];
        if ( ttd.mode != wxTOKEN_STRTOK )
            continue;

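        // wxStrtok() modifies the string it parses, so give it a writable
        // copy of the test string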
        wxCharBuffer buf(ttd.str);
        wxChar *last;
        wxChar *s = wxStrtok(buf.data(), ttd.delims, &last);

        wxStringTokenizer tkz(ttd.str, ttd.delims, ttd.mode);
        while ( tkz.HasMoreTokens() )
        {
            CPPUNIT_ASSERT_EQUAL( wxString(s), tkz.GetNextToken() );
            s = wxStrtok(NULL, ttd.delims, &last);
        }
    }
}