author    Piotr Jaroszyński <peper@gentoo.org>  2007-10-15 11:26:17 +0000
committer Piotr Jaroszyński <peper@gentoo.org>  2007-10-15 11:26:17 +0000
commit    20dde20c875bce797a6b47a1e543822aab6ffabc (patch)
tree      bb577771afe7228751c2d9d081d16660bfc82f5e
parent    503481c165de28bc0a5db25b7848708df7cb2668 (diff)
download  paludis-20dde20c875bce797a6b47a1e543822aab6ffabc.tar.gz
          paludis-20dde20c875bce797a6b47a1e543822aab6ffabc.tar.xz
Stop instantiating tokeniser. Fixes: ticket:396
-rw-r--r--  paludis/environments/no_config/no_config_environment.cc | 2
-rw-r--r--  paludis/environments/paludis/keywords_conf.cc | 2
-rw-r--r--  paludis/environments/paludis/licenses_conf.cc | 2
-rw-r--r--  paludis/environments/paludis/mirrors_conf.cc | 2
-rw-r--r--  paludis/environments/paludis/use_conf.cc | 2
-rw-r--r--  paludis/environments/portage/portage_environment.cc | 18
-rw-r--r--  paludis/hooker.cc | 2
-rw-r--r--  paludis/host_tuple_name.cc | 3
-rw-r--r--  paludis/install_task.cc | 2
-rw-r--r--  paludis/repositories/cran/cran_dep_parser.cc | 5
-rw-r--r--  paludis/repositories/cran/cran_installed_repository.cc | 2
-rw-r--r--  paludis/repositories/cran/cran_package_id.cc | 2
-rw-r--r--  paludis/repositories/e/dep_lexer.cc | 3
-rw-r--r--  paludis/repositories/e/dep_parser.cc | 2
-rw-r--r--  paludis/repositories/e/e_key.cc | 12
-rw-r--r--  paludis/repositories/e/e_repository.cc | 8
-rw-r--r--  paludis/repositories/e/e_repository_profile.cc | 20
-rw-r--r--  paludis/repositories/e/eapi.cc | 11
-rw-r--r--  paludis/repositories/e/eapi_phase.cc | 5
-rw-r--r--  paludis/repositories/e/ebuild.cc | 2
-rw-r--r--  paludis/repositories/e/ebuild_entries.cc | 2
-rw-r--r--  paludis/repositories/e/ebuild_flat_metadata_cache.cc | 4
-rw-r--r--  paludis/repositories/e/make_ebin_repository.cc | 2
-rw-r--r--  paludis/repositories/e/make_ebuild_repository.cc | 4
-rw-r--r--  paludis/repositories/e/manifest2_reader.cc | 2
-rw-r--r--  paludis/repositories/e/qa/visibility.cc | 2
-rw-r--r--  paludis/repositories/e/vdb_merger.cc | 4
-rw-r--r--  paludis/repositories/e/vdb_repository.cc | 2
-rw-r--r--  paludis/repositories/e/vdb_unmerger.cc | 6
-rw-r--r--  paludis/repositories/e/xml_things.cc | 4
-rw-r--r--  paludis/repositories/fake/fake_package_id.cc | 4
-rw-r--r--  paludis/repositories/unpackaged/ndbam.cc | 3
-rw-r--r--  paludis/repositories/unpackaged/ndbam_merger.cc | 4
-rw-r--r--  paludis/repositories/unpackaged/ndbam_unmerger.cc | 4
-rw-r--r--  paludis/set_file.cc | 4
-rw-r--r--  paludis/util/config_file.cc | 4
-rw-r--r--  paludis/util/files.m4 | 2
-rw-r--r--  paludis/util/tokeniser.cc | 31
-rw-r--r--  paludis/util/tokeniser.hh | 43
-rw-r--r--  paludis/util/tokeniser_TEST.cc | 30
-rw-r--r--  src/clients/adjutrix/downgrade_check.cc | 2
-rw-r--r--  src/clients/contrarius/stage.cc | 2
-rw-r--r--  src/clients/contrarius/target_config.cc | 5
-rw-r--r--  src/clients/reconcilio/broken_linkage_finder/configuration.cc | 39
-rw-r--r--  src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc | 2
-rw-r--r--  src/output/console_query_task.cc | 2
46 files changed, 142 insertions, 178 deletions
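
In short, the commit drops the instance/singleton tokeniser API in favour of static calls: the delimiter string moves from the constructor into the tokenise() call, delim_mode::DelimiterTag becomes the default mode, and WhitespaceTokeniser loses its get_instance(). A minimal before/after sketch of the calling convention (hypothetical example, assuming the patched paludis/util/tokeniser.hh shown below):

    #include <paludis/util/tokeniser.hh>
    #include <iterator>
    #include <string>
    #include <vector>

    using namespace paludis;

    void example()
    {
        std::vector<std::string> tokens;

        // Before: instantiate a tokeniser (or fetch the singleton), then call
        // through the instance:
        //   Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> t(":");
        //   t.tokenise("a:b:c", std::back_inserter(tokens));
        //   WhitespaceTokeniser::get_instance()->tokenise("a b c", std::back_inserter(tokens));

        // After: no instantiation; the delimiters are passed to the static call,
        // and DelimiterTag no longer needs spelling out:
        Tokeniser<delim_kind::AnyOfTag>::tokenise("a:b:c", ":", std::back_inserter(tokens));
        WhitespaceTokeniser::tokenise("a b c", std::back_inserter(tokens));
    }
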
diff --git a/paludis/environments/no_config/no_config_environment.cc b/paludis/environments/no_config/no_config_environment.cc
index a5838bd..0161376 100644
--- a/paludis/environments/no_config/no_config_environment.cc
+++ b/paludis/environments/no_config/no_config_environment.cc
@@ -329,7 +329,7 @@ NoConfigEnvironment::accept_keywords(tr1::shared_ptr<const KeywordNameSet> keywo
else
{
std::list<KeywordName> accepted;
- WhitespaceTokeniser::get_instance()->tokenise(ak,
+ WhitespaceTokeniser::tokenise(ak,
create_inserter<KeywordName>(std::back_inserter(accepted)));
for (KeywordNameSet::ConstIterator k(keywords->begin()), k_end(keywords->end()) ;
diff --git a/paludis/environments/paludis/keywords_conf.cc b/paludis/environments/paludis/keywords_conf.cc
index 56ef1a0..3fb1a1b 100644
--- a/paludis/environments/paludis/keywords_conf.cc
+++ b/paludis/environments/paludis/keywords_conf.cc
@@ -90,7 +90,7 @@ KeywordsConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/licenses_conf.cc b/paludis/environments/paludis/licenses_conf.cc
index c4c65b3..c40f68b 100644
--- a/paludis/environments/paludis/licenses_conf.cc
+++ b/paludis/environments/paludis/licenses_conf.cc
@@ -88,7 +88,7 @@ LicensesConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/mirrors_conf.cc b/paludis/environments/paludis/mirrors_conf.cc
index 794acb4..a563b9f 100644
--- a/paludis/environments/paludis/mirrors_conf.cc
+++ b/paludis/environments/paludis/mirrors_conf.cc
@@ -74,7 +74,7 @@ MirrorsConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/use_conf.cc b/paludis/environments/paludis/use_conf.cc
index a8afd22..ff388ed 100644
--- a/paludis/environments/paludis/use_conf.cc
+++ b/paludis/environments/paludis/use_conf.cc
@@ -92,7 +92,7 @@ UseConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/portage/portage_environment.cc b/paludis/environments/portage/portage_environment.cc
index f5771d8..35426e9 100644
--- a/paludis/environments/portage/portage_environment.cc
+++ b/paludis/environments/portage/portage_environment.cc
@@ -152,7 +152,7 @@ namespace
return true;
std::set<std::string> use_expand;
- WhitespaceTokeniser::get_instance()->tokenise(k.get("USE_EXPAND"),
+ WhitespaceTokeniser::tokenise(k.get("USE_EXPAND"),
std::inserter(use_expand, use_expand.begin()));
if (use_expand.end() != use_expand.find(s))
return true;
@@ -193,16 +193,16 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
_add_portdir_repository(FSEntry(_imp->vars->get("PORTDIR")));
_add_vdb_repository();
std::list<FSEntry> portdir_overlay;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get("PORTDIR_OVERLAY"),
+ WhitespaceTokeniser::tokenise(_imp->vars->get("PORTDIR_OVERLAY"),
create_inserter<FSEntry>(std::back_inserter(portdir_overlay)));
std::for_each(portdir_overlay.begin(), portdir_overlay.end(),
tr1::bind(tr1::mem_fn(&PortageEnvironment::_add_portdir_overlay_repository), this, _1));
/* use etc */
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get("USE"), std::inserter(_imp->use_with_expands,
+ WhitespaceTokeniser::tokenise(_imp->vars->get("USE"), std::inserter(_imp->use_with_expands,
_imp->use_with_expands.begin()));
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get("USE_EXPAND"), std::inserter(_imp->use_expand,
+ WhitespaceTokeniser::tokenise(_imp->vars->get("USE_EXPAND"), std::inserter(_imp->use_expand,
_imp->use_expand.begin()));
for (std::set<std::string>::const_iterator i(_imp->use_expand.begin()), i_end(_imp->use_expand.end()) ;
i != i_end ; ++i)
@@ -211,7 +211,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
std::transform(i->begin(), i->end(), std::back_inserter(lower_i), ::tolower);
std::set<std::string> values;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get(*i), std::inserter(values,
+ WhitespaceTokeniser::tokenise(_imp->vars->get(*i), std::inserter(values,
values.begin()));
for (std::set<std::string>::const_iterator v(values.begin()), v_end(values.end()) ;
v != v_end ; ++v)
@@ -219,7 +219,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
}
/* accept keywords */
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get("ACCEPT_KEYWORDS"),
+ WhitespaceTokeniser::tokenise(_imp->vars->get("ACCEPT_KEYWORDS"),
std::inserter(_imp->accept_keywords, _imp->accept_keywords.begin()));
/* files */
@@ -233,7 +233,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
/* mirrors */
std::list<std::string> gentoo_mirrors;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->vars->get("GENTOO_MIRRORS"),
+ WhitespaceTokeniser::tokenise(_imp->vars->get("GENTOO_MIRRORS"),
std::back_inserter(gentoo_mirrors));
for (std::list<std::string>::const_iterator m(gentoo_mirrors.begin()), m_end(gentoo_mirrors.end()) ;
m != m_end ; ++m)
@@ -246,7 +246,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -280,7 +280,7 @@ PortageEnvironment::_load_atom_file(const FSEntry & f, I_ i, const std::string &
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/hooker.cc b/paludis/hooker.cc
index 01e6e22..b833e2f 100644
--- a/paludis/hooker.cc
+++ b/paludis/hooker.cc
@@ -275,7 +275,7 @@ FancyHookFile::_add_dependency_class(const Hook & hook, DirectedGraph<std::strin
+ "' returned success '" + stringify(exit_status) + "', result '" + deps + "'");
std::set<std::string> deps_s;
- WhitespaceTokeniser::get_instance()->tokenise(deps, std::inserter(deps_s, deps_s.end()));
+ WhitespaceTokeniser::tokenise(deps, std::inserter(deps_s, deps_s.end()));
for (std::set<std::string>::const_iterator d(deps_s.begin()), d_end(deps_s.end()) ;
d != d_end ; ++d)
diff --git a/paludis/host_tuple_name.cc b/paludis/host_tuple_name.cc
index 421d2b6..2b46e69 100644
--- a/paludis/host_tuple_name.cc
+++ b/paludis/host_tuple_name.cc
@@ -49,10 +49,9 @@ HostTupleName::HostTupleName(const std::string & s) :
{
Context c("When creating a HostTupleName from '" + s + "':");
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser("-");
std::vector<std::string> tokens;
- tokeniser.tokenise(s, std::back_inserter(tokens));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(s, "-", std::back_inserter(tokens));
switch (tokens.size())
{
case 2: // Type 'arch'-'userland', i.e. 'spu-elf'.
diff --git a/paludis/install_task.cc b/paludis/install_task.cc
index 73f2ec4..1d29248 100644
--- a/paludis/install_task.cc
+++ b/paludis/install_task.cc
@@ -697,7 +697,7 @@ InstallTask::_main_actions()
tr1::shared_ptr<ConstTreeSequence<SetSpecTree, AllDepSpec> > all(new ConstTreeSequence<SetSpecTree, AllDepSpec>(
tr1::shared_ptr<AllDepSpec>(new AllDepSpec)));
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*_imp->add_to_world_spec, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*_imp->add_to_world_spec, std::back_inserter(tokens));
if ((! tokens.empty()) && ("(" == *tokens.begin()) && (")" == *previous(tokens.end())))
{
tokens.erase(tokens.begin());
diff --git a/paludis/repositories/cran/cran_dep_parser.cc b/paludis/repositories/cran/cran_dep_parser.cc
index e9216d0..b44002b 100644
--- a/paludis/repositories/cran/cran_dep_parser.cc
+++ b/paludis/repositories/cran/cran_dep_parser.cc
@@ -40,10 +40,9 @@ cranrepository::parse_depends(const std::string & s)
tr1::shared_ptr<ConstTreeSequence<DependencySpecTree, AllDepSpec> > result(
new ConstTreeSequence<DependencySpecTree, AllDepSpec>(tr1::shared_ptr<AllDepSpec>(new AllDepSpec)));
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> spec_tokeniser(",");
-
std::list<std::string> specs;
- spec_tokeniser.tokenise(s, std::back_inserter(specs));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ",", std::back_inserter(specs));
+
std::list<std::string>::const_iterator a(specs.begin()), a_end(specs.end());
for ( ; a != a_end ; ++a)
{
diff --git a/paludis/repositories/cran/cran_installed_repository.cc b/paludis/repositories/cran/cran_installed_repository.cc
index ace3311..9c85c2a 100644
--- a/paludis/repositories/cran/cran_installed_repository.cc
+++ b/paludis/repositories/cran/cran_installed_repository.cc
@@ -284,7 +284,7 @@ CRANInstalledRepository::do_contents(const PackageID & id) const
++line_number;
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/repositories/cran/cran_package_id.cc b/paludis/repositories/cran/cran_package_id.cc
index bbd29e4..735e929 100644
--- a/paludis/repositories/cran/cran_package_id.cc
+++ b/paludis/repositories/cran/cran_package_id.cc
@@ -193,7 +193,7 @@ CRANPackageID::CRANPackageID(const Environment * const env, const tr1::shared_pt
{
Context local_context("When handling Contains: key:");
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(file.get("Contains"), std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(file.get("Contains"), std::back_inserter(tokens));
_imp->contains_key.reset(new PackageIDSequenceKey(_imp->env, "Contains", "Contains", mkt_normal));
add_metadata_key(_imp->contains_key);
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
diff --git a/paludis/repositories/e/dep_lexer.cc b/paludis/repositories/e/dep_lexer.cc
index 04ee6fe..a933eaa 100644
--- a/paludis/repositories/e/dep_lexer.cc
+++ b/paludis/repositories/e/dep_lexer.cc
@@ -65,9 +65,8 @@ DepLexer::DepLexer(const std::string & s) :
{
Context context("When lexing dependency string '" + s + "':");
- Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag> tokeniser(" \n\t");
std::vector<std::string> tokens;
- tokeniser.tokenise(s, std::back_inserter(tokens));
+ Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag>::tokenise(s, " \n\t", std::back_inserter(tokens));
for (std::vector<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
diff --git a/paludis/repositories/e/dep_parser.cc b/paludis/repositories/e/dep_parser.cc
index 0ed3f06..7c211ff 100644
--- a/paludis/repositories/e/dep_parser.cc
+++ b/paludis/repositories/e/dep_parser.cc
@@ -790,7 +790,7 @@ paludis::erepository::parse_dependency_label(const std::string & s, const EAPI &
std::set<std::string> labels;
std::string label(s.substr(0, s.length() - 1));
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(",+").tokenise(label, std::inserter(labels, labels.end()));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(label, ",+", std::inserter(labels, labels.end()));
tr1::shared_ptr<DependencyLabelsDepSpec> l(new DependencyLabelsDepSpec);
diff --git a/paludis/repositories/e/e_key.cc b/paludis/repositories/e/e_key.cc
index 36f67bd..446b1ee 100644
--- a/paludis/repositories/e/e_key.cc
+++ b/paludis/repositories/e/e_key.cc
@@ -631,7 +631,7 @@ EIUseKey::value() const
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
_imp->value.reset(new IUseFlagSet);
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->string_value, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(_imp->string_value, std::back_inserter(tokens));
tr1::shared_ptr<const UseFlagNameSet> prefixes;
if (_imp->id->repository()->use_interface)
@@ -884,7 +884,7 @@ EKeywordsKey::value() const
_imp->value.reset(new KeywordNameSet);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
- WhitespaceTokeniser::get_instance()->tokenise(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
+ WhitespaceTokeniser::tokenise(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
return _imp->value;
}
@@ -898,7 +898,7 @@ EKeywordsKey::idle_load() const
{
_imp->value.reset(new KeywordNameSet);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "' as idle action:");
- WhitespaceTokeniser::get_instance()->tokenise(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
+ WhitespaceTokeniser::tokenise(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
_imp->value_used = tr1::bind(tr1::mem_fn(&IdleActionPool::increase_used_stat), IdleActionPool::get_instance());
return iar_success;
}
@@ -977,7 +977,7 @@ EUseKey::value() const
_imp->value.reset(new UseFlagNameSet);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->string_value, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(_imp->string_value, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
if ('-' != t->at(0))
@@ -1048,7 +1048,7 @@ EInheritedKey::value() const
_imp->value.reset(new Set<std::string>);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
- WhitespaceTokeniser::get_instance()->tokenise(_imp->string_value, _imp->value->inserter());
+ WhitespaceTokeniser::tokenise(_imp->string_value, _imp->value->inserter());
return _imp->value;
}
@@ -1119,7 +1119,7 @@ EContentsKey::value() const
++line_number;
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/repositories/e/e_repository.cc b/paludis/repositories/e/e_repository.cc
index 4b6e822..8bfc6d5 100644
--- a/paludis/repositories/e/e_repository.cc
+++ b/paludis/repositories/e/e_repository.cc
@@ -228,7 +228,7 @@ namespace paludis
for (LineConfigFile::ConstIterator line(f.begin()), line_end(f.end()) ; line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line,
+ WhitespaceTokeniser::tokenise(*line,
std::back_inserter(tokens));
if (tokens.size() < 3)
continue;
@@ -532,7 +532,7 @@ ERepository::need_mirrors() const
for (LineConfigFile::ConstIterator line(mirrors.begin()) ; line != mirrors.end() ; ++line)
{
std::vector<std::string> ee;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(ee));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(ee));
if (! ee.empty())
{
/* pick up to five random mirrors only */
@@ -587,7 +587,7 @@ ERepository::do_sync() const
return false;
std::list<std::string> sync_list;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->params.sync, std::back_inserter(sync_list));
+ WhitespaceTokeniser::tokenise(_imp->params.sync, std::back_inserter(sync_list));
bool ok(false);
for (std::list<std::string>::const_iterator s(sync_list.begin()),
@@ -810,7 +810,7 @@ ERepository::do_use_expand_flags() const
i_end(_imp->profile_ptr->end_use_expand()) ; i != i_end ; ++i)
{
std::list<std::string> values;
- WhitespaceTokeniser::get_instance()->tokenise(_imp->profile_ptr->environment_variable(
+ WhitespaceTokeniser::tokenise(_imp->profile_ptr->environment_variable(
stringify(*i)), std::back_inserter(values));
for (std::list<std::string>::const_iterator j(values.begin()), j_end(values.end()) ;
j != j_end ; ++j)
diff --git a/paludis/repositories/e/e_repository_profile.cc b/paludis/repositories/e/e_repository_profile.cc
index 21e8f31..c72dbb6 100644
--- a/paludis/repositories/e/e_repository_profile.cc
+++ b/paludis/repositories/e/e_repository_profile.cc
@@ -304,8 +304,8 @@ Implementation<ERepositoryProfile>::load_profile_make_defaults(const FSEntry & d
if (is_incremental(k->first))
{
std::list<std::string> val, val_add;
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[k->first], std::back_inserter(val));
- WhitespaceTokeniser::get_instance()->tokenise(k->second, std::back_inserter(val_add));
+ WhitespaceTokeniser::tokenise(environment_variables[k->first], std::back_inserter(val));
+ WhitespaceTokeniser::tokenise(k->second, std::back_inserter(val_add));
for (std::list<std::string>::const_iterator v(val_add.begin()), v_end(val_add.end()) ;
v != v_end ; ++v)
@@ -332,7 +332,7 @@ Implementation<ERepositoryProfile>::load_profile_make_defaults(const FSEntry & d
{
use_expand.clear();
if (! use_expand_var.empty())
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[use_expand_var],
+ WhitespaceTokeniser::tokenise(environment_variables[use_expand_var],
create_inserter<UseFlagName>(std::inserter(use_expand, use_expand.end())));
}
catch (const Exception & e)
@@ -351,7 +351,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use.clear();
if (! use_var.empty())
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[use_var],
+ WhitespaceTokeniser::tokenise(environment_variables[use_var],
create_inserter<UseFlagName>(std::inserter(use, use.end())));
}
catch (const Exception & e)
@@ -366,7 +366,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use_expand.clear();
if (! use_expand_var.empty())
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[use_expand_var],
+ WhitespaceTokeniser::tokenise(environment_variables[use_expand_var],
create_inserter<UseFlagName>(std::inserter(use_expand, use_expand.end())));
}
catch (const Exception & e)
@@ -381,7 +381,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use_expand_hidden.clear();
if (! use_expand_hidden_var.empty())
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[use_expand_hidden_var],
+ WhitespaceTokeniser::tokenise(environment_variables[use_expand_hidden_var],
create_inserter<UseFlagName>(std::inserter(use_expand_hidden, use_expand_hidden.end())));
}
catch (const Exception & e)
@@ -454,7 +454,7 @@ Implementation<ERepositoryProfile>::make_vars_from_file_vars()
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -507,7 +507,7 @@ Implementation<ERepositoryProfile>::load_basic_use_file(const FSEntry & file, Fl
line != line_end ; ++line)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
@@ -542,7 +542,7 @@ Implementation<ERepositoryProfile>::load_spec_use_file(const FSEntry & file, Pac
line != line_end ; ++line)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
@@ -597,7 +597,7 @@ Implementation<ERepositoryProfile>::add_use_expand_to_use()
&::tolower);
std::list<std::string> uses;
- WhitespaceTokeniser::get_instance()->tokenise(environment_variables[stringify(*x)],
+ WhitespaceTokeniser::tokenise(environment_variables[stringify(*x)],
std::back_inserter(uses));
for (std::list<std::string>::const_iterator u(uses.begin()), u_end(uses.end()) ;
u != u_end ; ++u)
diff --git a/paludis/repositories/e/eapi.cc b/paludis/repositories/e/eapi.cc
index e18eb16..7c3e339 100644
--- a/paludis/repositories/e/eapi.cc
+++ b/paludis/repositories/e/eapi.cc
@@ -176,9 +176,9 @@ namespace paludis
))));
- WhitespaceTokeniser::get_instance()->tokenise(k.get("restrict_fetch"),
+ WhitespaceTokeniser::tokenise(k.get("restrict_fetch"),
eapi->supported->ebuild_options->restrict_fetch->inserter());
- WhitespaceTokeniser::get_instance()->tokenise(k.get("restrict_mirror"),
+ WhitespaceTokeniser::tokenise(k.get("restrict_mirror"),
eapi->supported->ebuild_options->restrict_mirror->inserter());
values.insert(std::make_pair(strip_trailing_string(d->basename(), ".conf"), eapi));
@@ -236,15 +236,14 @@ EAPILabels::EAPILabels(const std::string & s) :
PrivateImplementationPattern<EAPILabels>(new Implementation<EAPILabels>)
{
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tok(";");
- tok.tokenise(s, std::back_inserter(tokens));
+
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ";", std::back_inserter(tokens));
for (std::vector<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
{
std::vector<std::string> values;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> vtok("=");
- vtok.tokenise(*t, std::back_inserter(values));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(*t, "=", std::back_inserter(values));
if (values.size() != 2)
throw EAPIConfigurationError("EAPI labels value '" + s + "' has bad values size '" + stringify(values.size()) + "'");
diff --git a/paludis/repositories/e/eapi_phase.cc b/paludis/repositories/e/eapi_phase.cc
index 7d365bf..e69a552 100644
--- a/paludis/repositories/e/eapi_phase.cc
+++ b/paludis/repositories/e/eapi_phase.cc
@@ -54,7 +54,7 @@ EAPIPhase::EAPIPhase(const std::string & s) :
Context c("When parsing EAPI phase '" + s + "'");
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
std::list<std::string>::iterator t(std::find(tokens.begin(), tokens.end(), ":"));
if (t == tokens.end())
@@ -92,8 +92,7 @@ EAPIPhases::EAPIPhases(const std::string & s) :
Context c("When parsing EAPI phases '" + s + "'");
std::list<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tok(";");
- tok.tokenise(s, std::back_inserter(tokens));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ";", std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
_imp->phases.push_back(make_shared_ptr(new EAPIPhase(*t)));
diff --git a/paludis/repositories/e/ebuild.cc b/paludis/repositories/e/ebuild.cc
index 00e0e13..6ca35d5 100644
--- a/paludis/repositories/e/ebuild.cc
+++ b/paludis/repositories/e/ebuild.cc
@@ -254,7 +254,7 @@ namespace
std::string purdy(const std::string & s)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
return join(tokens.begin(), tokens.end(), " \\n ");
}
}
diff --git a/paludis/repositories/e/ebuild_entries.cc b/paludis/repositories/e/ebuild_entries.cc
index 71b9eac..d80d4f6 100644
--- a/paludis/repositories/e/ebuild_entries.cc
+++ b/paludis/repositories/e/ebuild_entries.cc
@@ -234,7 +234,7 @@ namespace
/* possible values from profile */
std::set<UseFlagName> possible_values;
- WhitespaceTokeniser::get_instance()->tokenise(profile->environment_variable(stringify(*x)),
+ WhitespaceTokeniser::tokenise(profile->environment_variable(stringify(*x)),
create_inserter<UseFlagName>(std::inserter(possible_values, possible_values.end())));
/* possible values from environment */
diff --git a/paludis/repositories/e/ebuild_flat_metadata_cache.cc b/paludis/repositories/e/ebuild_flat_metadata_cache.cc
index 51ff788..9baae91 100644
--- a/paludis/repositories/e/ebuild_flat_metadata_cache.cc
+++ b/paludis/repositories/e/ebuild_flat_metadata_cache.cc
@@ -78,7 +78,7 @@ EbuildFlatMetadataCache::load(const tr1::shared_ptr<const EbuildID> & id)
{
time_t cache_time(std::max(_master_mtime, _filename.mtime()));
std::set<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(lines[9], std::inserter(tokens, tokens.begin()));
+ WhitespaceTokeniser::tokenise(lines[9], std::inserter(tokens, tokens.begin()));
ok = _ebuild.mtime() <= cache_time;
if (ok && ! tokens.empty())
@@ -151,7 +151,7 @@ namespace
std::string normalise(const T_ & s)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(stringify(s), std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(stringify(s), std::back_inserter(tokens));
return join(tokens.begin(), tokens.end(), " ");
}
diff --git a/paludis/repositories/e/make_ebin_repository.cc b/paludis/repositories/e/make_ebin_repository.cc
index 30bd74e..19c7a1a 100644
--- a/paludis/repositories/e/make_ebin_repository.cc
+++ b/paludis/repositories/e/make_ebin_repository.cc
@@ -71,7 +71,7 @@ paludis::make_ebin_repository(
tr1::shared_ptr<FSEntrySequence> profiles(new FSEntrySequence);
if (m->end() != m->find("profiles"))
- WhitespaceTokeniser::get_instance()->tokenise(m->find("profiles")->second,
+ WhitespaceTokeniser::tokenise(m->find("profiles")->second,
create_inserter<FSEntry>(std::back_inserter(*profiles)));
if (profiles->empty())
diff --git a/paludis/repositories/e/make_ebuild_repository.cc b/paludis/repositories/e/make_ebuild_repository.cc
index cc37ea4..827bc10 100644
--- a/paludis/repositories/e/make_ebuild_repository.cc
+++ b/paludis/repositories/e/make_ebuild_repository.cc
@@ -71,7 +71,7 @@ paludis::make_ebuild_repository(
tr1::shared_ptr<FSEntrySequence> profiles(new FSEntrySequence);
if (m->end() != m->find("profiles"))
- WhitespaceTokeniser::get_instance()->tokenise(m->find("profiles")->second,
+ WhitespaceTokeniser::tokenise(m->find("profiles")->second,
create_inserter<FSEntry>(std::back_inserter(*profiles)));
if (profiles->empty())
@@ -86,7 +86,7 @@ paludis::make_ebuild_repository(
tr1::shared_ptr<FSEntrySequence> eclassdirs(new FSEntrySequence);
if (m->end() != m->find("eclassdirs"))
- WhitespaceTokeniser::get_instance()->tokenise(m->find("eclassdirs")->second,
+ WhitespaceTokeniser::tokenise(m->find("eclassdirs")->second,
create_inserter<FSEntry>(std::back_inserter(*eclassdirs)));
if (eclassdirs->empty())
diff --git a/paludis/repositories/e/manifest2_reader.cc b/paludis/repositories/e/manifest2_reader.cc
index c2d03c0..2ba414c 100644
--- a/paludis/repositories/e/manifest2_reader.cc
+++ b/paludis/repositories/e/manifest2_reader.cc
@@ -75,7 +75,7 @@ Manifest2Reader::Manifest2Reader(const FSEntry & f) :
l != l_end ; ++l)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise((*l),
+ WhitespaceTokeniser::tokenise((*l),
create_inserter<std::string>(std::back_inserter(tokens)));
std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end());
diff --git a/paludis/repositories/e/qa/visibility.cc b/paludis/repositories/e/qa/visibility.cc
index bc139ca..8e48beb 100644
--- a/paludis/repositories/e/qa/visibility.cc
+++ b/paludis/repositories/e/qa/visibility.cc
@@ -289,7 +289,7 @@ paludis::erepository::visibility_check(
continue;
std::set<KeywordName> accepted_keywords, overlap;
- WhitespaceTokeniser::get_instance()->tokenise(p->profile->environment_variable(
+ WhitespaceTokeniser::tokenise(p->profile->environment_variable(
repo->accept_keywords_variable()), create_inserter<KeywordName>(std::inserter(accepted_keywords, accepted_keywords.begin())));
std::set_intersection(accepted_keywords.begin(), accepted_keywords.end(),
diff --git a/paludis/repositories/e/vdb_merger.cc b/paludis/repositories/e/vdb_merger.cc
index e129804..3fddb6e 100644
--- a/paludis/repositories/e/vdb_merger.cc
+++ b/paludis/repositories/e/vdb_merger.cc
@@ -58,9 +58,9 @@ namespace paludis
options(o),
realroot(options.root.realpath())
{
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect,
+ WhitespaceTokeniser::tokenise(o.config_protect,
std::back_inserter(config_protect));
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect_mask,
+ WhitespaceTokeniser::tokenise(o.config_protect_mask,
std::back_inserter(config_protect_mask));
}
};
diff --git a/paludis/repositories/e/vdb_repository.cc b/paludis/repositories/e/vdb_repository.cc
index 3aaccb8..9ccef78 100644
--- a/paludis/repositories/e/vdb_repository.cc
+++ b/paludis/repositories/e/vdb_repository.cc
@@ -861,7 +861,7 @@ VDBRepository::load_provided_using_cache() const
while (std::getline(provides_cache, line))
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
if (tokens.size() < 3)
continue;
diff --git a/paludis/repositories/e/vdb_unmerger.cc b/paludis/repositories/e/vdb_unmerger.cc
index 6e52685..a91bb34 100644
--- a/paludis/repositories/e/vdb_unmerger.cc
+++ b/paludis/repositories/e/vdb_unmerger.cc
@@ -59,9 +59,9 @@ namespace paludis
Implementation(const VDBUnmergerOptions & o) :
options(o)
{
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect,
+ WhitespaceTokeniser::tokenise(o.config_protect,
std::back_inserter(config_protect));
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect_mask,
+ WhitespaceTokeniser::tokenise(o.config_protect_mask,
std::back_inserter(config_protect_mask));
}
};
@@ -210,7 +210,7 @@ VDBUnmerger::populate_unmerge_set()
while (std::getline(c, line))
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/repositories/e/xml_things.cc b/paludis/repositories/e/xml_things.cc
index d3cc221..b9ab909 100644
--- a/paludis/repositories/e/xml_things.cc
+++ b/paludis/repositories/e/xml_things.cc
@@ -43,7 +43,7 @@ namespace
std::string normalise(const std::string & s)
{
std::list<std::string> words;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(words));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(words));
return join(words.begin(), words.end(), " ");
}
@@ -96,7 +96,7 @@ namespace
if (name == "arch")
{
std::set<std::string> archs;
- WhitespaceTokeniser::get_instance()->tokenise(retarded_libxml_string_to_string(
+ WhitespaceTokeniser::tokenise(retarded_libxml_string_to_string(
xmlNodeListGetString(doc, a->xmlChildrenNode, 1)),
std::inserter(archs, archs.end()));
archs.erase("*");
diff --git a/paludis/repositories/fake/fake_package_id.cc b/paludis/repositories/fake/fake_package_id.cc
index f097b2f..282412a 100644
--- a/paludis/repositories/fake/fake_package_id.cc
+++ b/paludis/repositories/fake/fake_package_id.cc
@@ -98,7 +98,7 @@ void
FakeMetadataKeywordSetKey::set_from_string(const std::string & s)
{
_imp->collection.reset(new KeywordNameSet);
- WhitespaceTokeniser::get_instance()->tokenise(s, create_inserter<KeywordName>(_imp->collection->inserter()));
+ WhitespaceTokeniser::tokenise(s, create_inserter<KeywordName>(_imp->collection->inserter()));
}
FakeMetadataIUseSetKey::FakeMetadataIUseSetKey(const std::string & r,
@@ -114,7 +114,7 @@ FakeMetadataIUseSetKey::set_from_string(const std::string & s, const IUseFlagPar
{
_imp->collection.reset(new IUseFlagSet);
std::list<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
_imp->collection->insert(IUseFlag(*t, m, std::string::npos));
diff --git a/paludis/repositories/unpackaged/ndbam.cc b/paludis/repositories/unpackaged/ndbam.cc
index dbd2f06..3cfedce 100644
--- a/paludis/repositories/unpackaged/ndbam.cc
+++ b/paludis/repositories/unpackaged/ndbam.cc
@@ -299,8 +299,7 @@ NDBAM::entries(const QualifiedPackageName & q)
try
{
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> t(":");
- t.tokenise(d->basename(), std::back_inserter(tokens));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(d->basename(), ":", std::back_inserter(tokens));
if (tokens.size() < 3)
{
Log::get_instance()->message(ll_warning, lc_context) << "Not using '" << *d <<
diff --git a/paludis/repositories/unpackaged/ndbam_merger.cc b/paludis/repositories/unpackaged/ndbam_merger.cc
index ab2d3c5..f45acdc 100644
--- a/paludis/repositories/unpackaged/ndbam_merger.cc
+++ b/paludis/repositories/unpackaged/ndbam_merger.cc
@@ -58,9 +58,9 @@ namespace paludis
options(o),
realroot(options.root.realpath())
{
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect,
+ WhitespaceTokeniser::tokenise(o.config_protect,
std::back_inserter(config_protect));
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect_mask,
+ WhitespaceTokeniser::tokenise(o.config_protect_mask,
std::back_inserter(config_protect_mask));
}
};
diff --git a/paludis/repositories/unpackaged/ndbam_unmerger.cc b/paludis/repositories/unpackaged/ndbam_unmerger.cc
index 1ca1c9e..be02a8d 100644
--- a/paludis/repositories/unpackaged/ndbam_unmerger.cc
+++ b/paludis/repositories/unpackaged/ndbam_unmerger.cc
@@ -64,9 +64,9 @@ namespace paludis
Implementation(const NDBAMUnmergerOptions & o) :
options(o)
{
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect,
+ WhitespaceTokeniser::tokenise(o.config_protect,
std::back_inserter(config_protect));
- WhitespaceTokeniser::get_instance()->tokenise(o.config_protect_mask,
+ WhitespaceTokeniser::tokenise(o.config_protect_mask,
std::back_inserter(config_protect_mask));
}
};
diff --git a/paludis/set_file.cc b/paludis/set_file.cc
index a2dc371..f8688e9 100644
--- a/paludis/set_file.cc
+++ b/paludis/set_file.cc
@@ -156,7 +156,7 @@ namespace
bool operator() (const std::string & l) const
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(l, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(l, std::back_inserter(tokens));
return (tokens.size() >= 1) && (tokens.at(1) == query);
}
@@ -177,7 +177,7 @@ namespace
try
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
if (tokens.empty())
return;
diff --git a/paludis/util/config_file.cc b/paludis/util/config_file.cc
index ba77dde..d7a4c6e 100644
--- a/paludis/util/config_file.cc
+++ b/paludis/util/config_file.cc
@@ -706,8 +706,8 @@ KeyValueConfigFile::_parse(const Source & ss, const KeyValueConfigFileOptions &
{
std::list<std::string> values;
std::set<std::string> new_values;
- WhitespaceTokeniser::get_instance()->tokenise(get(key), std::back_inserter(values));
- WhitespaceTokeniser::get_instance()->tokenise(value, std::back_inserter(values));
+ WhitespaceTokeniser::tokenise(get(key), std::back_inserter(values));
+ WhitespaceTokeniser::tokenise(value, std::back_inserter(values));
for (std::list<std::string>::const_iterator v(values.begin()), v_end(values.end()) ;
v != v_end ; ++v)
if (v->empty())
diff --git a/paludis/util/files.m4 b/paludis/util/files.m4
index c41c93a..115d5b7 100644
--- a/paludis/util/files.m4
+++ b/paludis/util/files.m4
@@ -49,7 +49,7 @@ add(`strip', `hh', `cc', `test')
add(`system', `hh', `cc', `test', `testscript')
add(`thread', `hh', `cc', `test')
add(`thread_pool', `hh', `cc', `test')
-add(`tokeniser', `hh', `cc', `test')
+add(`tokeniser', `hh', `test')
add(`tr1_memory', `hh')
add(`tr1_type_traits', `hh')
add(`tr1_functional', `hh')
diff --git a/paludis/util/tokeniser.cc b/paludis/util/tokeniser.cc
deleted file mode 100644
index 88a00b6..0000000
--- a/paludis/util/tokeniser.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/* vim: set sw=4 sts=4 et foldmethod=syntax : */
-
-/*
- * Copyright (c) 2006, 2007 Ciaran McCreesh <ciaranm@ciaranm.org>
- *
- * This file is part of the Paludis package manager. Paludis is free software;
- * you can redistribute it and/or modify it under the terms of the GNU General
- * Public License version 2, as published by the Free Software Foundation.
- *
- * Paludis is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
- * details.
- *
- * You should have received a copy of the GNU General Public License along with
- * this program; if not, write to the Free Software Foundation, Inc., 59 Temple
- * Place, Suite 330, Boston, MA 02111-1307 USA
- */
-
-#include "tokeniser.hh"
-#include <paludis/util/instantiation_policy-impl.hh>
-
-using namespace paludis;
-
-template class InstantiationPolicy<WhitespaceTokeniser, instantiation_method::SingletonTag>;
-
-WhitespaceTokeniser::WhitespaceTokeniser() :
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(" \t\r\n")
-{
-}
-
diff --git a/paludis/util/tokeniser.hh b/paludis/util/tokeniser.hh
index ff099e1..9415961 100644
--- a/paludis/util/tokeniser.hh
+++ b/paludis/util/tokeniser.hh
@@ -152,7 +152,8 @@ namespace paludis
*
* \ingroup g_strings
*/
- template <typename DelimKind_, typename DelimMode_, typename Char_ = std::string::value_type>
+ template <typename DelimKind_, typename DelimMode_ = delim_mode::DelimiterTag,
+ typename Char_ = std::string::value_type>
struct Tokeniser;
/**
@@ -162,45 +163,37 @@ namespace paludis
* \nosubgrouping
*/
template <typename DelimMode_, typename Char_>
- class Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_> :
- private InstantiationPolicy<Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_>,
- instantiation_method::NonCopyableTag>
+ class Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_>
{
private:
- const std::basic_string<Char_> _delims;
+ Tokeniser();
public:
///\name Basic operations
///\{
- Tokeniser(const std::basic_string<Char_> & delims) :
- _delims(delims)
- {
- }
-
- ///\}
-
/**
* Do the tokenisation.
*/
template <typename Iter_>
- void tokenise(const std::basic_string<Char_> & s, Iter_ iter) const;
+ static void tokenise(const std::basic_string<Char_> & s,
+ const std::basic_string<Char_> & delims, Iter_ iter);
};
template <typename DelimMode_, typename Char_>
template <typename Iter_>
void
Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_>::tokenise(
- const std::basic_string<Char_> & s, Iter_ iter) const
+ const std::basic_string<Char_> & s, const std::basic_string<Char_> & delims, Iter_ iter)
{
typename std::basic_string<Char_>::size_type p(0), old_p(0);
- bool in_delim((! s.empty()) && std::basic_string<Char_>::npos != _delims.find(s[0]));
+ bool in_delim((! s.empty()) && std::basic_string<Char_>::npos != delims.find(s[0]));
for ( ; p < s.length() ; ++p)
{
if (in_delim)
{
- if (std::basic_string<Char_>::npos == _delims.find(s[p]))
+ if (std::basic_string<Char_>::npos == delims.find(s[p]))
{
tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_delim(
s.substr(old_p, p - old_p), iter);
@@ -210,7 +203,7 @@ namespace paludis
}
else
{
- if (std::basic_string<Char_>::npos != _delims.find(s[p]))
+ if (std::basic_string<Char_>::npos != delims.find(s[p]))
{
tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_token(
s.substr(old_p, p - old_p), iter);
@@ -232,18 +225,18 @@ namespace paludis
}
/**
- * Convenience singleton class for tokenising on whitespace.
+ * Convenience class for tokenising on whitespace.
*
* \ingroup g_strings
*/
- class PALUDIS_VISIBLE WhitespaceTokeniser :
- public InstantiationPolicy<WhitespaceTokeniser, instantiation_method::SingletonTag>,
- public Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag>
+ class PALUDIS_VISIBLE WhitespaceTokeniser
{
- friend class InstantiationPolicy<WhitespaceTokeniser, instantiation_method::SingletonTag>;
-
- private:
- WhitespaceTokeniser();
+ public:
+ template <typename Iter_>
+ static void tokenise(const std::string & s, Iter_ iter)
+ {
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(s, " \t\r\n", iter);
+ }
};
}
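
For reference, the two delimiter modes behave as the updated tests below expect: DelimiterTag (now the default) discards delimiter runs, while BoundaryTag emits each run as a token of its own. A small illustrative sketch using the new static interface (hypothetical function name):

    #include <paludis/util/tokeniser.hh>
    #include <iterator>
    #include <string>
    #include <vector>

    using namespace paludis;

    void modes_example()
    {
        std::vector<std::string> tokens;

        // DelimiterTag (the default): delimiters only separate tokens.
        Tokeniser<delim_kind::AnyOfTag>::tokenise(
                "one,two...+...three...", ",.+", std::back_inserter(tokens));
        // tokens: "one", "two", "three"

        tokens.clear();

        // BoundaryTag: every run of delimiters becomes a token as well.
        Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag>::tokenise(
                "one,two...+...three...", ",.+", std::back_inserter(tokens));
        // tokens: "one", ",", "two", "...+...", "three", "..."
    }
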
diff --git a/paludis/util/tokeniser_TEST.cc b/paludis/util/tokeniser_TEST.cc
index 4cead06..94e6df2 100644
--- a/paludis/util/tokeniser_TEST.cc
+++ b/paludis/util/tokeniser_TEST.cc
@@ -39,15 +39,17 @@ namespace test_cases
*/
struct TestTokeniserAD : TestCase
{
- TestTokeniserAD() : TestCase("Tokeniser<AnyOfTag, DelimiterTag>") { }
+ TestTokeniserAD() : TestCase("Tokeniser<AnyOfTag, DelimiterTag(default)>") { }
void run()
{
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> t(",.+");
+ typedef Tokeniser<delim_kind::AnyOfTag> t;
+ const std::string delims(",.+");
+
std::vector<std::string> tokens;
TEST_CHECK(tokens.empty());
- t.tokenise("one,two...+...three...", std::back_inserter(tokens));
+ t::tokenise("one,two...+...three...", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(3));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), "two");
@@ -55,7 +57,7 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("...one,two...+...three", std::back_inserter(tokens));
+ t::tokenise("...one,two...+...three", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(3));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), "two");
@@ -63,18 +65,18 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("one", std::back_inserter(tokens));
+ t::tokenise("one", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), "one");
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise(".+.,.", std::back_inserter(tokens));
+ t::tokenise(".+.,.", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("", std::back_inserter(tokens));
+ t::tokenise("", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
}
@@ -90,11 +92,13 @@ namespace test_cases
void run()
{
- Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag> t(",.+");
+ typedef Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag> t;
+ const std::string delims(",.+");
+
std::vector<std::string> tokens;
TEST_CHECK(tokens.empty());
- t.tokenise("one,two...+...three...", std::back_inserter(tokens));
+ t::tokenise("one,two...+...three...", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(6));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), ",");
@@ -105,7 +109,7 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("...one,two...+...three", std::back_inserter(tokens));
+ t::tokenise("...one,two...+...three", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(6));
TEST_CHECK_EQUAL(tokens.at(0), "...");
TEST_CHECK_EQUAL(tokens.at(1), "one");
@@ -116,19 +120,19 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("one", std::back_inserter(tokens));
+ t::tokenise("one", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), "one");
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise(".+.,.", std::back_inserter(tokens));
+ t::tokenise(".+.,.", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), ".+.,.");
tokens.clear();
TEST_CHECK(tokens.empty());
- t.tokenise("", std::back_inserter(tokens));
+ t::tokenise("", delims, std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
}
diff --git a/src/clients/adjutrix/downgrade_check.cc b/src/clients/adjutrix/downgrade_check.cc
index ed870c3..f59ac46 100644
--- a/src/clients/adjutrix/downgrade_check.cc
+++ b/src/clients/adjutrix/downgrade_check.cc
@@ -107,7 +107,7 @@ namespace
while (std::getline(f, s))
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
if (tokens.size() != 3)
throw ConfigurationError("Bad line '" + s + "'");
diff --git a/src/clients/contrarius/stage.cc b/src/clients/contrarius/stage.cc
index f490445..094b0aa 100644
--- a/src/clients/contrarius/stage.cc
+++ b/src/clients/contrarius/stage.cc
@@ -45,7 +45,7 @@ bool
AuxiliaryStage::is_rebuild() const
{
std::list<std::string> packages;
- WhitespaceTokeniser::get_instance()->tokenise(TargetConfig::get_instance()->aux(), std::back_inserter(packages));
+ WhitespaceTokeniser::tokenise(TargetConfig::get_instance()->aux(), std::back_inserter(packages));
for (std::list<std::string>::const_iterator p(packages.begin()), p_end(packages.end()) ;
p != p_end ; ++p)
diff --git a/src/clients/contrarius/target_config.cc b/src/clients/contrarius/target_config.cc
index 7574e46..951e196 100644
--- a/src/clients/contrarius/target_config.cc
+++ b/src/clients/contrarius/target_config.cc
@@ -45,7 +45,7 @@ TargetConfig::_parse_defaults()
{
Context c2("While parsing line '" + *l + "'");
std::vector<std::string> tokens;
- WhitespaceTokeniser::get_instance()->tokenise(*l, std::back_inserter(tokens));
+ WhitespaceTokeniser::tokenise(*l, std::back_inserter(tokens));
SpecEntryList * list(&_binutils_list);
if ((("aux" == tokens[1]) || "headers" == tokens[1]) && (2 == std::distance(tokens.begin(), tokens.end())))
@@ -78,14 +78,13 @@ TargetConfig::_parse_defaults()
std::string
TargetConfig::_find_match(SpecEntryList & list)
{
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser("-");
std::vector<std::string> tokens;
for (SpecEntryList::const_iterator i(list.begin()), i_end(list.end()) ;
i != i_end ; ++i)
{
tokens.clear();
- tokeniser.tokenise(i->first, std::back_inserter(tokens));
+ Tokeniser<delim_kind::AnyOfTag>::tokenise(i->first, "-", std::back_inserter(tokens));
for (unsigned index(0) ; index < 4 ; ++index)
{
diff --git a/src/clients/reconcilio/broken_linkage_finder/configuration.cc b/src/clients/reconcilio/broken_linkage_finder/configuration.cc
index 011fa48..cd5a9e9 100644
--- a/src/clients/reconcilio/broken_linkage_finder/configuration.cc
+++ b/src/clients/reconcilio/broken_linkage_finder/configuration.cc
@@ -72,26 +72,30 @@ namespace
}
};
- template <typename T_, typename DelimKind_, typename DelimMode_, typename Char_>
+ template <typename Tokeniser_, typename T_>
void
from_string(const tr1::function<std::string (const std::string &)> & source,
- const std::string & varname, std::vector<T_> & vec,
- const Tokeniser<DelimKind_, DelimMode_, Char_> & tokeniser)
+ const std::string & varname, std::vector<T_> & vec, const std::string & delims)
{
std::string str(source(varname));
if (! str.empty())
{
Log::get_instance()->message(ll_debug, lc_context, "Got " + varname + "=\"" + str + "\"");
- tokeniser.tokenise(str, std::back_inserter(vec));
+ Tokeniser_::tokenise(str, delims, std::back_inserter(vec));
}
}
template <typename T_>
- inline void
+ void
from_string(const tr1::function<std::string (const std::string &)> & source,
const std::string & varname, std::vector<T_> & vec)
{
- from_string(source, varname, vec, *WhitespaceTokeniser::get_instance());
+ std::string str(source(varname));
+ if (! str.empty())
+ {
+ Log::get_instance()->message(ll_debug, lc_context, "Got " + varname + "=\"" + str + "\"");
+ WhitespaceTokeniser::tokenise(str, std::back_inserter(vec));
+ }
}
inline void
@@ -233,13 +237,14 @@ Implementation<Configuration>::load_from_etc_profile_env(const FSEntry & root)
opts += kvcfo_ignore_export;
KeyValueConfigFile kvs(etc_profile_env, opts);
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(":");
+ typedef Tokeniser<delim_kind::AnyOfTag> Tokeniser;
+ const std::string delims(":");
tr1::function<std::string (const std::string &)> fromfile(
tr1::bind(&KeyValueConfigFile::get, tr1::cref(kvs), _1));
- from_string(fromfile, "PATH", search_dirs, tokeniser);
- from_string(fromfile, "ROOTPATH", search_dirs, tokeniser);
+ from_string<Tokeniser>(fromfile, "PATH", search_dirs, delims);
+ from_string<Tokeniser>(fromfile, "ROOTPATH", search_dirs, delims);
}
else if (etc_profile_env.exists())
Log::get_instance()->message(ll_warning, lc_context, "'" + stringify(etc_profile_env) + "' exists but is not a regular file");
@@ -282,20 +287,20 @@ Implementation<Configuration>::add_defaults()
static const std::string default_ld_so_conf("/lib /usr/lib");
Log::get_instance()->message(ll_debug, lc_context, "Got LD_LIBRARY_MASK=\"" + default_ld_library_mask + "\"");
- WhitespaceTokeniser::get_instance()->tokenise(
- default_ld_library_mask, std::back_inserter(ld_library_mask));
+ WhitespaceTokeniser::tokenise(
+ default_ld_library_mask, std::back_inserter(ld_library_mask));
Log::get_instance()->message(ll_debug, lc_context, "Got SEARCH_DIRS=\"" + default_search_dirs + "\"");
- WhitespaceTokeniser::get_instance()->tokenise(
- default_search_dirs, std::back_inserter(search_dirs));
+ WhitespaceTokeniser::tokenise(
+ default_search_dirs, std::back_inserter(search_dirs));
Log::get_instance()->message(ll_debug, lc_context, "Got SEARCH_DIRS_MASK=\"" + default_search_dirs_mask + "\"");
- WhitespaceTokeniser::get_instance()->tokenise(
- default_search_dirs_mask, std::back_inserter(search_dirs_mask));
+ WhitespaceTokeniser::tokenise(
+ default_search_dirs_mask, std::back_inserter(search_dirs_mask));
Log::get_instance()->message(ll_debug, lc_context, "Default ld.so.conf contents is \"" + default_ld_so_conf + "\"");
- WhitespaceTokeniser::get_instance()->tokenise(
- default_ld_so_conf, std::back_inserter(ld_so_conf));
+ WhitespaceTokeniser::tokenise(
+ default_ld_so_conf, std::back_inserter(ld_so_conf));
}
Configuration::DirsIterator
diff --git a/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc b/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
index a1a03d0..23b8616 100644
--- a/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
+++ b/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
@@ -102,7 +102,7 @@ LibtoolLinkageChecker::check_file(const FSEntry & file)
try
{
KeyValueConfigFile kvs(stream, opts);
- WhitespaceTokeniser::get_instance()->tokenise(
+ WhitespaceTokeniser::tokenise(
kvs.get("dependency_libs"), std::back_inserter(deps));
}
catch (const ConfigFileError & ex)
diff --git a/src/output/console_query_task.cc b/src/output/console_query_task.cc
index 4fb6953..2650394 100644
--- a/src/output/console_query_task.cc
+++ b/src/output/console_query_task.cc
@@ -587,7 +587,7 @@ namespace
std::string normalise(const std::string & s)
{
std::list<std::string> w;
- WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(w));
+ WhitespaceTokeniser::tokenise(s, std::back_inserter(w));
return join(w.begin(), w.end(), " ");
}
}