aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAvatar Ciaran McCreesh <ciaran.mccreesh@googlemail.com> 2007-11-24 14:14:22 +0000
committerAvatar Ciaran McCreesh <ciaran.mccreesh@googlemail.com> 2007-11-24 14:14:22 +0000
commit98b8136f9e9374dac4cf860c0a1e504c6ec7ff3e (patch)
tree45f8b6eaf24858fc5702726f5e5c09a0766ed405
parent88e66f89fd170cfe378ec8801ced9e99422663a5 (diff)
downloadpaludis-98b8136f9e9374dac4cf860c0a1e504c6ec7ff3e.tar.gz
paludis-98b8136f9e9374dac4cf860c0a1e504c6ec7ff3e.tar.xz
New improved tokeniser
-rw-r--r--paludis/environments/no_config/no_config_environment.cc3
-rw-r--r--paludis/environments/paludis/keywords_conf.cc2
-rw-r--r--paludis/environments/paludis/licenses_conf.cc2
-rw-r--r--paludis/environments/paludis/mirrors_conf.cc2
-rw-r--r--paludis/environments/paludis/use_conf.cc2
-rw-r--r--paludis/environments/portage/portage_environment.cc18
-rw-r--r--paludis/hooker.cc2
-rw-r--r--paludis/host_tuple_name.cc2
-rw-r--r--paludis/install_task.cc2
-rw-r--r--paludis/repositories/cran/cran_dep_parser.cc2
-rw-r--r--paludis/repositories/cran/cran_package_id.cc2
-rw-r--r--paludis/repositories/e/dep_lexer.cc2
-rw-r--r--paludis/repositories/e/dep_parser.cc2
-rw-r--r--paludis/repositories/e/e_key.cc8
-rw-r--r--paludis/repositories/e/e_repository.cc9
-rw-r--r--paludis/repositories/e/e_repository_profile.cc20
-rw-r--r--paludis/repositories/e/eapi.cc10
-rw-r--r--paludis/repositories/e/eapi_phase.cc4
-rw-r--r--paludis/repositories/e/ebuild.cc2
-rw-r--r--paludis/repositories/e/ebuild_entries.cc2
-rw-r--r--paludis/repositories/e/ebuild_flat_metadata_cache.cc4
-rw-r--r--paludis/repositories/e/make_ebin_repository.cc2
-rw-r--r--paludis/repositories/e/make_ebuild_repository.cc4
-rw-r--r--paludis/repositories/e/manifest2_reader.cc3
-rw-r--r--paludis/repositories/e/qa/kv_variables.cc2
-rw-r--r--paludis/repositories/e/qa/visibility.cc2
-rw-r--r--paludis/repositories/e/qa/whitespace.cc2
-rw-r--r--paludis/repositories/e/vdb_merger.cc6
-rw-r--r--paludis/repositories/e/vdb_repository.cc2
-rw-r--r--paludis/repositories/e/vdb_unmerger.cc6
-rw-r--r--paludis/repositories/e/xml_things.cc5
-rw-r--r--paludis/repositories/fake/fake_package_id.cc4
-rw-r--r--paludis/repositories/unpackaged/dep_parser.cc2
-rw-r--r--paludis/repositories/unpackaged/ndbam.cc2
-rw-r--r--paludis/repositories/unpackaged/ndbam_merger.cc6
-rw-r--r--paludis/repositories/unpackaged/ndbam_unmerger.cc6
-rw-r--r--paludis/set_file.cc4
-rw-r--r--paludis/util/config_file.cc4
-rw-r--r--paludis/util/files.m42
-rw-r--r--paludis/util/tokeniser.cc28
-rw-r--r--paludis/util/tokeniser.hh304
-rw-r--r--paludis/util/tokeniser_TEST.cc64
-rw-r--r--src/clients/adjutrix/downgrade_check.cc2
-rw-r--r--src/clients/contrarius/stage.cc2
-rw-r--r--src/clients/contrarius/target_config.cc4
-rw-r--r--src/clients/reconcilio/broken_linkage_finder/configuration.cc24
-rw-r--r--src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc3
-rw-r--r--src/output/console_query_task.cc2
48 files changed, 394 insertions, 205 deletions
diff --git a/paludis/environments/no_config/no_config_environment.cc b/paludis/environments/no_config/no_config_environment.cc
index a003fb2..e0a0e69 100644
--- a/paludis/environments/no_config/no_config_environment.cc
+++ b/paludis/environments/no_config/no_config_environment.cc
@@ -339,8 +339,7 @@ NoConfigEnvironment::accept_keywords(tr1::shared_ptr<const KeywordNameSet> keywo
else
{
std::list<KeywordName> accepted;
- WhitespaceTokeniser::tokenise(ak,
- create_inserter<KeywordName>(std::back_inserter(accepted)));
+ tokenise_whitespace(ak, create_inserter<KeywordName>(std::back_inserter(accepted)));
for (KeywordNameSet::ConstIterator k(keywords->begin()), k_end(keywords->end()) ;
k != k_end ; ++k)
diff --git a/paludis/environments/paludis/keywords_conf.cc b/paludis/environments/paludis/keywords_conf.cc
index 410e68e..d0b33cc 100644
--- a/paludis/environments/paludis/keywords_conf.cc
+++ b/paludis/environments/paludis/keywords_conf.cc
@@ -90,7 +90,7 @@ KeywordsConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/licenses_conf.cc b/paludis/environments/paludis/licenses_conf.cc
index 7978fe8..14a61f6 100644
--- a/paludis/environments/paludis/licenses_conf.cc
+++ b/paludis/environments/paludis/licenses_conf.cc
@@ -89,7 +89,7 @@ LicensesConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/mirrors_conf.cc b/paludis/environments/paludis/mirrors_conf.cc
index 56cc93f..1ab8a27 100644
--- a/paludis/environments/paludis/mirrors_conf.cc
+++ b/paludis/environments/paludis/mirrors_conf.cc
@@ -75,7 +75,7 @@ MirrorsConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/paludis/use_conf.cc b/paludis/environments/paludis/use_conf.cc
index 49d076c..6a5f270 100644
--- a/paludis/environments/paludis/use_conf.cc
+++ b/paludis/environments/paludis/use_conf.cc
@@ -91,7 +91,7 @@ UseConf::add(const FSEntry & filename)
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
diff --git a/paludis/environments/portage/portage_environment.cc b/paludis/environments/portage/portage_environment.cc
index 8f8b239..5338e48 100644
--- a/paludis/environments/portage/portage_environment.cc
+++ b/paludis/environments/portage/portage_environment.cc
@@ -154,7 +154,7 @@ namespace
return true;
std::set<std::string> use_expand;
- WhitespaceTokeniser::tokenise(k.get("USE_EXPAND"),
+ tokenise_whitespace(k.get("USE_EXPAND"),
std::inserter(use_expand, use_expand.begin()));
if (use_expand.end() != use_expand.find(s))
return true;
@@ -195,16 +195,16 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
_add_portdir_repository(FSEntry(_imp->vars->get("PORTDIR")));
_add_vdb_repository();
std::list<FSEntry> portdir_overlay;
- WhitespaceTokeniser::tokenise(_imp->vars->get("PORTDIR_OVERLAY"),
+ tokenise_whitespace(_imp->vars->get("PORTDIR_OVERLAY"),
create_inserter<FSEntry>(std::back_inserter(portdir_overlay)));
std::for_each(portdir_overlay.begin(), portdir_overlay.end(),
tr1::bind(tr1::mem_fn(&PortageEnvironment::_add_portdir_overlay_repository), this, _1));
/* use etc */
- WhitespaceTokeniser::tokenise(_imp->vars->get("USE"), std::inserter(_imp->use_with_expands,
+ tokenise_whitespace(_imp->vars->get("USE"), std::inserter(_imp->use_with_expands,
_imp->use_with_expands.begin()));
- WhitespaceTokeniser::tokenise(_imp->vars->get("USE_EXPAND"), std::inserter(_imp->use_expand,
+ tokenise_whitespace(_imp->vars->get("USE_EXPAND"), std::inserter(_imp->use_expand,
_imp->use_expand.begin()));
for (std::set<std::string>::const_iterator i(_imp->use_expand.begin()), i_end(_imp->use_expand.end()) ;
i != i_end ; ++i)
@@ -213,7 +213,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
std::transform(i->begin(), i->end(), std::back_inserter(lower_i), ::tolower);
std::set<std::string> values;
- WhitespaceTokeniser::tokenise(_imp->vars->get(*i), std::inserter(values,
+ tokenise_whitespace(_imp->vars->get(*i), std::inserter(values,
values.begin()));
for (std::set<std::string>::const_iterator v(values.begin()), v_end(values.end()) ;
v != v_end ; ++v)
@@ -221,7 +221,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
}
/* accept keywords */
- WhitespaceTokeniser::tokenise(_imp->vars->get("ACCEPT_KEYWORDS"),
+ tokenise_whitespace(_imp->vars->get("ACCEPT_KEYWORDS"),
std::inserter(_imp->accept_keywords, _imp->accept_keywords.begin()));
/* files */
@@ -235,7 +235,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
/* mirrors */
std::list<std::string> gentoo_mirrors;
- WhitespaceTokeniser::tokenise(_imp->vars->get("GENTOO_MIRRORS"),
+ tokenise_whitespace(_imp->vars->get("GENTOO_MIRRORS"),
std::back_inserter(gentoo_mirrors));
for (std::list<std::string>::const_iterator m(gentoo_mirrors.begin()), m_end(gentoo_mirrors.end()) ;
m != m_end ; ++m)
@@ -248,7 +248,7 @@ PortageEnvironment::PortageEnvironment(const std::string & s) :
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -282,7 +282,7 @@ PortageEnvironment::_load_atom_file(const FSEntry & f, I_ i, const std::string &
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/hooker.cc b/paludis/hooker.cc
index d6937d9..1a2e4d9 100644
--- a/paludis/hooker.cc
+++ b/paludis/hooker.cc
@@ -275,7 +275,7 @@ FancyHookFile::_add_dependency_class(const Hook & hook, DirectedGraph<std::strin
+ "' returned success '" + stringify(exit_status) + "', result '" + deps + "'");
std::set<std::string> deps_s;
- WhitespaceTokeniser::tokenise(deps, std::inserter(deps_s, deps_s.end()));
+ tokenise_whitespace(deps, std::inserter(deps_s, deps_s.end()));
for (std::set<std::string>::const_iterator d(deps_s.begin()), d_end(deps_s.end()) ;
d != d_end ; ++d)
diff --git a/paludis/host_tuple_name.cc b/paludis/host_tuple_name.cc
index f3be82d..2248208 100644
--- a/paludis/host_tuple_name.cc
+++ b/paludis/host_tuple_name.cc
@@ -50,7 +50,7 @@ HostTupleName::HostTupleName(const std::string & s) :
Context c("When creating a HostTupleName from '" + s + "':");
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, "-", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, "-", "", std::back_inserter(tokens));
switch (tokens.size())
{
diff --git a/paludis/install_task.cc b/paludis/install_task.cc
index 624a785..ead64d1 100644
--- a/paludis/install_task.cc
+++ b/paludis/install_task.cc
@@ -723,7 +723,7 @@ InstallTask::_main_actions()
tr1::shared_ptr<ConstTreeSequence<SetSpecTree, AllDepSpec> > all(new ConstTreeSequence<SetSpecTree, AllDepSpec>(
tr1::shared_ptr<AllDepSpec>(new AllDepSpec)));
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(*_imp->add_to_world_spec, std::back_inserter(tokens));
+ tokenise_whitespace(*_imp->add_to_world_spec, std::back_inserter(tokens));
if ((! tokens.empty()) && ("(" == *tokens.begin()) && (")" == *previous(tokens.end())))
{
tokens.erase(tokens.begin());
diff --git a/paludis/repositories/cran/cran_dep_parser.cc b/paludis/repositories/cran/cran_dep_parser.cc
index 3136c1a..a37eeba 100644
--- a/paludis/repositories/cran/cran_dep_parser.cc
+++ b/paludis/repositories/cran/cran_dep_parser.cc
@@ -39,7 +39,7 @@ cranrepository::parse_depends(const std::string & s)
new ConstTreeSequence<DependencySpecTree, AllDepSpec>(tr1::shared_ptr<AllDepSpec>(new AllDepSpec)));
std::list<std::string> specs;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ",", std::back_inserter(specs));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, ",", "", std::back_inserter(specs));
std::list<std::string>::const_iterator a(specs.begin()), a_end(specs.end());
for ( ; a != a_end ; ++a)
diff --git a/paludis/repositories/cran/cran_package_id.cc b/paludis/repositories/cran/cran_package_id.cc
index 6443a4c..07a9847 100644
--- a/paludis/repositories/cran/cran_package_id.cc
+++ b/paludis/repositories/cran/cran_package_id.cc
@@ -192,7 +192,7 @@ CRANPackageID::CRANPackageID(const Environment * const env, const tr1::shared_pt
{
Context local_context("When handling Contains: key:");
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(file.get("Contains"), std::back_inserter(tokens));
+ tokenise_whitespace(file.get("Contains"), std::back_inserter(tokens));
_imp->contains_key.reset(new PackageIDSequenceKey(_imp->env, "Contains", "Contains", mkt_normal));
add_metadata_key(_imp->contains_key);
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
diff --git a/paludis/repositories/e/dep_lexer.cc b/paludis/repositories/e/dep_lexer.cc
index 90c6a7c..81fbe02 100644
--- a/paludis/repositories/e/dep_lexer.cc
+++ b/paludis/repositories/e/dep_lexer.cc
@@ -63,7 +63,7 @@ DepLexer::DepLexer(const std::string & s) :
Context context("When lexing dependency string '" + s + "':");
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag>::tokenise(s, " \n\t", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>(s, " \n\t", "", std::back_inserter(tokens));
for (std::vector<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
diff --git a/paludis/repositories/e/dep_parser.cc b/paludis/repositories/e/dep_parser.cc
index e3bf493..cd52635 100644
--- a/paludis/repositories/e/dep_parser.cc
+++ b/paludis/repositories/e/dep_parser.cc
@@ -789,7 +789,7 @@ paludis::erepository::parse_dependency_label(const std::string & s, const EAPI &
std::set<std::string> labels;
std::string label(s.substr(0, s.length() - 1));
- Tokeniser<delim_kind::AnyOfTag>::tokenise(label, ",+", std::inserter(labels, labels.end()));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(label, ",+", "", std::inserter(labels, labels.end()));
tr1::shared_ptr<DependencyLabelsDepSpec> l(new DependencyLabelsDepSpec);
diff --git a/paludis/repositories/e/e_key.cc b/paludis/repositories/e/e_key.cc
index 8a9b2ac..876e7d6 100644
--- a/paludis/repositories/e/e_key.cc
+++ b/paludis/repositories/e/e_key.cc
@@ -564,7 +564,7 @@ EIUseKey::value() const
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
_imp->value.reset(new IUseFlagSet);
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(_imp->string_value, std::back_inserter(tokens));
+ tokenise_whitespace(_imp->string_value, std::back_inserter(tokens));
tr1::shared_ptr<const UseFlagNameSet> prefixes;
if (_imp->id->repository()->use_interface)
@@ -792,7 +792,7 @@ EKeywordsKey::value() const
_imp->value.reset(new KeywordNameSet);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
- WhitespaceTokeniser::tokenise(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
+ tokenise_whitespace(_imp->string_value, create_inserter<KeywordName>(_imp->value->inserter()));
return _imp->value;
}
@@ -860,7 +860,7 @@ EUseKey::value() const
_imp->value.reset(new UseFlagNameSet);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(_imp->string_value, std::back_inserter(tokens));
+ tokenise_whitespace(_imp->string_value, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
if ('-' != t->at(0))
@@ -931,7 +931,7 @@ EInheritedKey::value() const
_imp->value.reset(new Set<std::string>);
Context context("When parsing metadata key '" + raw_name() + "' from '" + stringify(*_imp->id) + "':");
- WhitespaceTokeniser::tokenise(_imp->string_value, _imp->value->inserter());
+ tokenise_whitespace(_imp->string_value, _imp->value->inserter());
return _imp->value;
}
diff --git a/paludis/repositories/e/e_repository.cc b/paludis/repositories/e/e_repository.cc
index 269ca0f..604ce65 100644
--- a/paludis/repositories/e/e_repository.cc
+++ b/paludis/repositories/e/e_repository.cc
@@ -373,8 +373,7 @@ namespace paludis
for (LineConfigFile::ConstIterator line(f.begin()), line_end(f.end()) ; line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line,
- std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 3)
continue;
@@ -663,7 +662,7 @@ ERepository::need_mirrors() const
for (LineConfigFile::ConstIterator line(mirrors.begin()) ; line != mirrors.end() ; ++line)
{
std::vector<std::string> ee;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(ee));
+ tokenise_whitespace(*line, std::back_inserter(ee));
if (! ee.empty())
{
/* pick up to five random mirrors only */
@@ -718,7 +717,7 @@ ERepository::sync() const
return false;
std::list<std::string> sync_list;
- WhitespaceTokeniser::tokenise(_imp->params.sync, std::back_inserter(sync_list));
+ tokenise_whitespace(_imp->params.sync, std::back_inserter(sync_list));
bool ok(false);
for (std::list<std::string>::const_iterator s(sync_list.begin()),
@@ -868,7 +867,7 @@ ERepository::use_expand_flags() const
i_end(_imp->profile_ptr->end_use_expand()) ; i != i_end ; ++i)
{
std::list<std::string> values;
- WhitespaceTokeniser::tokenise(_imp->profile_ptr->environment_variable(
+ tokenise_whitespace(_imp->profile_ptr->environment_variable(
stringify(*i)), std::back_inserter(values));
for (std::list<std::string>::const_iterator j(values.begin()), j_end(values.end()) ;
j != j_end ; ++j)
diff --git a/paludis/repositories/e/e_repository_profile.cc b/paludis/repositories/e/e_repository_profile.cc
index 3a3905c..b62179f 100644
--- a/paludis/repositories/e/e_repository_profile.cc
+++ b/paludis/repositories/e/e_repository_profile.cc
@@ -310,8 +310,8 @@ Implementation<ERepositoryProfile>::load_profile_make_defaults(const FSEntry & d
if (is_incremental(k->first))
{
std::list<std::string> val, val_add;
- WhitespaceTokeniser::tokenise(environment_variables[k->first], std::back_inserter(val));
- WhitespaceTokeniser::tokenise(k->second, std::back_inserter(val_add));
+ tokenise_whitespace(environment_variables[k->first], std::back_inserter(val));
+ tokenise_whitespace(k->second, std::back_inserter(val_add));
for (std::list<std::string>::const_iterator v(val_add.begin()), v_end(val_add.end()) ;
v != v_end ; ++v)
@@ -338,7 +338,7 @@ Implementation<ERepositoryProfile>::load_profile_make_defaults(const FSEntry & d
{
use_expand.clear();
if (! use_expand_var.empty())
- WhitespaceTokeniser::tokenise(environment_variables[use_expand_var],
+ tokenise_whitespace(environment_variables[use_expand_var],
create_inserter<UseFlagName>(std::inserter(use_expand, use_expand.end())));
}
catch (const Exception & e)
@@ -357,7 +357,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use.clear();
if (! use_var.empty())
- WhitespaceTokeniser::tokenise(environment_variables[use_var],
+ tokenise_whitespace(environment_variables[use_var],
create_inserter<UseFlagName>(std::inserter(use, use.end())));
}
catch (const Exception & e)
@@ -372,7 +372,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use_expand.clear();
if (! use_expand_var.empty())
- WhitespaceTokeniser::tokenise(environment_variables[use_expand_var],
+ tokenise_whitespace(environment_variables[use_expand_var],
create_inserter<UseFlagName>(std::inserter(use_expand, use_expand.end())));
}
catch (const Exception & e)
@@ -387,7 +387,7 @@ Implementation<ERepositoryProfile>::load_special_make_defaults_vars()
{
use_expand_hidden.clear();
if (! use_expand_hidden_var.empty())
- WhitespaceTokeniser::tokenise(environment_variables[use_expand_hidden_var],
+ tokenise_whitespace(environment_variables[use_expand_hidden_var],
create_inserter<UseFlagName>(std::inserter(use_expand_hidden, use_expand_hidden.end())));
}
catch (const Exception & e)
@@ -460,7 +460,7 @@ Implementation<ERepositoryProfile>::make_vars_from_file_vars()
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -513,7 +513,7 @@ Implementation<ERepositoryProfile>::load_basic_use_file(const FSEntry & file, Fl
line != line_end ; ++line)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
@@ -548,7 +548,7 @@ Implementation<ERepositoryProfile>::load_spec_use_file(const FSEntry & file, Pac
line != line_end ; ++line)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(*line, std::back_inserter(tokens));
+ tokenise_whitespace(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
@@ -603,7 +603,7 @@ Implementation<ERepositoryProfile>::add_use_expand_to_use()
&::tolower);
std::list<std::string> uses;
- WhitespaceTokeniser::tokenise(environment_variables[stringify(*x)],
+ tokenise_whitespace(environment_variables[stringify(*x)],
std::back_inserter(uses));
for (std::list<std::string>::const_iterator u(uses.begin()), u_end(uses.end()) ;
u != u_end ; ++u)
diff --git a/paludis/repositories/e/eapi.cc b/paludis/repositories/e/eapi.cc
index db5a901..b2a933f 100644
--- a/paludis/repositories/e/eapi.cc
+++ b/paludis/repositories/e/eapi.cc
@@ -176,11 +176,11 @@ namespace paludis
))));
- WhitespaceTokeniser::tokenise(k.get("restrict_fetch"),
+ tokenise_whitespace(k.get("restrict_fetch"),
eapi->supported->ebuild_options->restrict_fetch->inserter());
- WhitespaceTokeniser::tokenise(k.get("restrict_mirror"),
+ tokenise_whitespace(k.get("restrict_mirror"),
eapi->supported->ebuild_options->restrict_mirror->inserter());
- WhitespaceTokeniser::tokenise(k.get("restrict_primaryuri"),
+ tokenise_whitespace(k.get("restrict_primaryuri"),
eapi->supported->ebuild_options->restrict_primaryuri->inserter());
values.insert(std::make_pair(strip_trailing_string(d->basename(), ".conf"), eapi));
@@ -239,13 +239,13 @@ EAPILabels::EAPILabels(const std::string & s) :
{
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ";", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, ";", "", std::back_inserter(tokens));
for (std::vector<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
{
std::vector<std::string> values;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(*t, "=", std::back_inserter(values));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(*t, "=", "", std::back_inserter(values));
if (values.size() != 2)
throw EAPIConfigurationError("EAPI labels value '" + s + "' has bad values size '" + stringify(values.size()) + "'");
diff --git a/paludis/repositories/e/eapi_phase.cc b/paludis/repositories/e/eapi_phase.cc
index 936dae7..32a40d8 100644
--- a/paludis/repositories/e/eapi_phase.cc
+++ b/paludis/repositories/e/eapi_phase.cc
@@ -57,7 +57,7 @@ EAPIPhase::EAPIPhase(const std::string & s) :
Context c("When parsing EAPI phase '" + s + "'");
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
+ tokenise_whitespace(s, std::back_inserter(tokens));
std::list<std::string>::iterator t(std::find(tokens.begin(), tokens.end(), ":"));
if (t == tokens.end())
@@ -95,7 +95,7 @@ EAPIPhases::EAPIPhases(const std::string & s) :
Context c("When parsing EAPI phases '" + s + "'");
std::list<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ";", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, ";", "", std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
_imp->phases.push_back(make_shared_ptr(new EAPIPhase(*t)));
diff --git a/paludis/repositories/e/ebuild.cc b/paludis/repositories/e/ebuild.cc
index 8715eac..f84b036 100644
--- a/paludis/repositories/e/ebuild.cc
+++ b/paludis/repositories/e/ebuild.cc
@@ -253,7 +253,7 @@ namespace
std::string purdy(const std::string & s)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
+ tokenise_whitespace(s, std::back_inserter(tokens));
return join(tokens.begin(), tokens.end(), " \\n ");
}
}
diff --git a/paludis/repositories/e/ebuild_entries.cc b/paludis/repositories/e/ebuild_entries.cc
index bafed93..13f0601 100644
--- a/paludis/repositories/e/ebuild_entries.cc
+++ b/paludis/repositories/e/ebuild_entries.cc
@@ -221,7 +221,7 @@ namespace
/* possible values from profile */
std::set<UseFlagName> possible_values;
- WhitespaceTokeniser::tokenise(profile->environment_variable(stringify(*x)),
+ tokenise_whitespace(profile->environment_variable(stringify(*x)),
create_inserter<UseFlagName>(std::inserter(possible_values, possible_values.end())));
/* possible values from environment */
diff --git a/paludis/repositories/e/ebuild_flat_metadata_cache.cc b/paludis/repositories/e/ebuild_flat_metadata_cache.cc
index 824edc2..de564f2 100644
--- a/paludis/repositories/e/ebuild_flat_metadata_cache.cc
+++ b/paludis/repositories/e/ebuild_flat_metadata_cache.cc
@@ -77,7 +77,7 @@ EbuildFlatMetadataCache::load(const tr1::shared_ptr<const EbuildID> & id)
{
time_t cache_time(std::max(_master_mtime, _filename.mtime()));
std::set<std::string> tokens;
- WhitespaceTokeniser::tokenise(lines[9], std::inserter(tokens, tokens.begin()));
+ tokenise_whitespace(lines[9], std::inserter(tokens, tokens.begin()));
ok = _ebuild.mtime() <= cache_time;
if (ok && ! tokens.empty())
@@ -152,7 +152,7 @@ namespace
std::string normalise(const T_ & s)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(stringify(s), std::back_inserter(tokens));
+ tokenise_whitespace(stringify(s), std::back_inserter(tokens));
return join(tokens.begin(), tokens.end(), " ");
}
diff --git a/paludis/repositories/e/make_ebin_repository.cc b/paludis/repositories/e/make_ebin_repository.cc
index dfd0ce6..682f445 100644
--- a/paludis/repositories/e/make_ebin_repository.cc
+++ b/paludis/repositories/e/make_ebin_repository.cc
@@ -76,7 +76,7 @@ paludis::make_ebin_repository(
tr1::shared_ptr<FSEntrySequence> profiles(new FSEntrySequence);
if (m->end() != m->find("profiles"))
- WhitespaceTokeniser::tokenise(m->find("profiles")->second,
+ tokenise_whitespace(m->find("profiles")->second,
create_inserter<FSEntry>(std::back_inserter(*profiles)));
if (profiles->empty())
diff --git a/paludis/repositories/e/make_ebuild_repository.cc b/paludis/repositories/e/make_ebuild_repository.cc
index 5f50796..247e856 100644
--- a/paludis/repositories/e/make_ebuild_repository.cc
+++ b/paludis/repositories/e/make_ebuild_repository.cc
@@ -76,7 +76,7 @@ paludis::make_ebuild_repository(
tr1::shared_ptr<FSEntrySequence> profiles(new FSEntrySequence);
if (m->end() != m->find("profiles"))
- WhitespaceTokeniser::tokenise(m->find("profiles")->second,
+ tokenise_whitespace(m->find("profiles")->second,
create_inserter<FSEntry>(std::back_inserter(*profiles)));
if (profiles->empty())
@@ -91,7 +91,7 @@ paludis::make_ebuild_repository(
tr1::shared_ptr<FSEntrySequence> eclassdirs(new FSEntrySequence);
if (m->end() != m->find("eclassdirs"))
- WhitespaceTokeniser::tokenise(m->find("eclassdirs")->second,
+ tokenise_whitespace(m->find("eclassdirs")->second,
create_inserter<FSEntry>(std::back_inserter(*eclassdirs)));
if (eclassdirs->empty())
diff --git a/paludis/repositories/e/manifest2_reader.cc b/paludis/repositories/e/manifest2_reader.cc
index 5a2b09f..9446a78 100644
--- a/paludis/repositories/e/manifest2_reader.cc
+++ b/paludis/repositories/e/manifest2_reader.cc
@@ -72,8 +72,7 @@ Manifest2Reader::Manifest2Reader(const FSEntry & f) :
l != l_end ; ++l)
{
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise((*l),
- create_inserter<std::string>(std::back_inserter(tokens)));
+ tokenise_whitespace((*l), create_inserter<std::string>(std::back_inserter(tokens)));
std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end());
std::string type, name, sha1, sha256, rmd160, md5;
diff --git a/paludis/repositories/e/qa/kv_variables.cc b/paludis/repositories/e/qa/kv_variables.cc
index 3e2218e..9125756 100644
--- a/paludis/repositories/e/qa/kv_variables.cc
+++ b/paludis/repositories/e/qa/kv_variables.cc
@@ -44,7 +44,7 @@ paludis::erepository::kv_variables_check(
<< entry << "', '" << *id << "', '" << name << "'";
std::list<std::string> lines;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(content, "\n", std::back_inserter(lines));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(content, "\n", "", std::back_inserter(lines));
unsigned line(0);
for (std::list<std::string>::const_iterator l(lines.begin()), l_end(lines.end()) ;
diff --git a/paludis/repositories/e/qa/visibility.cc b/paludis/repositories/e/qa/visibility.cc
index 6fe517b..0df42ad 100644
--- a/paludis/repositories/e/qa/visibility.cc
+++ b/paludis/repositories/e/qa/visibility.cc
@@ -288,7 +288,7 @@ paludis::erepository::visibility_check(
continue;
std::set<KeywordName> accepted_keywords, overlap;
- WhitespaceTokeniser::tokenise(p->profile->environment_variable(
+ tokenise_whitespace(p->profile->environment_variable(
repo->accept_keywords_variable()), create_inserter<KeywordName>(std::inserter(accepted_keywords, accepted_keywords.begin())));
std::set_intersection(accepted_keywords.begin(), accepted_keywords.end(),
diff --git a/paludis/repositories/e/qa/whitespace.cc b/paludis/repositories/e/qa/whitespace.cc
index 705c0ae..4688ee7 100644
--- a/paludis/repositories/e/qa/whitespace.cc
+++ b/paludis/repositories/e/qa/whitespace.cc
@@ -40,7 +40,7 @@ paludis::erepository::whitespace_check(
<< entry << "', '" << *id << "', '" << name << "'";
std::list<std::string> lines;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(content, "\n", std::back_inserter(lines));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(content, "\n", "", std::back_inserter(lines));
unsigned line(0), err_count(0);
for (std::list<std::string>::const_iterator l(lines.begin()), l_end(lines.end()) ;
diff --git a/paludis/repositories/e/vdb_merger.cc b/paludis/repositories/e/vdb_merger.cc
index 3a714b0..24ddf54 100644
--- a/paludis/repositories/e/vdb_merger.cc
+++ b/paludis/repositories/e/vdb_merger.cc
@@ -57,10 +57,8 @@ namespace paludis
options(o),
realroot(options.root.realpath())
{
- WhitespaceTokeniser::tokenise(o.config_protect,
- std::back_inserter(config_protect));
- WhitespaceTokeniser::tokenise(o.config_protect_mask,
- std::back_inserter(config_protect_mask));
+ tokenise_whitespace(o.config_protect, std::back_inserter(config_protect));
+ tokenise_whitespace(o.config_protect_mask, std::back_inserter(config_protect_mask));
}
};
}
diff --git a/paludis/repositories/e/vdb_repository.cc b/paludis/repositories/e/vdb_repository.cc
index 5a9d97b..913b6cf 100644
--- a/paludis/repositories/e/vdb_repository.cc
+++ b/paludis/repositories/e/vdb_repository.cc
@@ -871,7 +871,7 @@ VDBRepository::load_provided_using_cache() const
while (std::getline(provides_cache, line))
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
+ tokenise_whitespace(line, std::back_inserter(tokens));
if (tokens.size() < 3)
continue;
diff --git a/paludis/repositories/e/vdb_unmerger.cc b/paludis/repositories/e/vdb_unmerger.cc
index 043f4cc..209970b 100644
--- a/paludis/repositories/e/vdb_unmerger.cc
+++ b/paludis/repositories/e/vdb_unmerger.cc
@@ -58,10 +58,8 @@ namespace paludis
Implementation(const VDBUnmergerOptions & o) :
options(o)
{
- WhitespaceTokeniser::tokenise(o.config_protect,
- std::back_inserter(config_protect));
- WhitespaceTokeniser::tokenise(o.config_protect_mask,
- std::back_inserter(config_protect_mask));
+ tokenise_whitespace(o.config_protect, std::back_inserter(config_protect));
+ tokenise_whitespace(o.config_protect_mask, std::back_inserter(config_protect_mask));
}
};
}
diff --git a/paludis/repositories/e/xml_things.cc b/paludis/repositories/e/xml_things.cc
index fcfb4d6..e4a75d3 100644
--- a/paludis/repositories/e/xml_things.cc
+++ b/paludis/repositories/e/xml_things.cc
@@ -43,7 +43,7 @@ namespace
std::string normalise(const std::string & s)
{
std::list<std::string> words;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(words));
+ tokenise_whitespace(s, std::back_inserter(words));
return join(words.begin(), words.end(), " ");
}
@@ -96,8 +96,7 @@ namespace
if (name == "arch")
{
std::set<std::string> archs;
- WhitespaceTokeniser::tokenise(retarded_libxml_string_to_string(
- xmlNodeListGetString(doc, a->xmlChildrenNode, 1)),
+ tokenise_whitespace(retarded_libxml_string_to_string(xmlNodeListGetString(doc, a->xmlChildrenNode, 1)),
std::inserter(archs, archs.end()));
archs.erase("*");
for (std::set<std::string>::const_iterator r(archs.begin()), r_end(archs.end()) ;
diff --git a/paludis/repositories/fake/fake_package_id.cc b/paludis/repositories/fake/fake_package_id.cc
index adef75c..ede593c 100644
--- a/paludis/repositories/fake/fake_package_id.cc
+++ b/paludis/repositories/fake/fake_package_id.cc
@@ -98,7 +98,7 @@ void
FakeMetadataKeywordSetKey::set_from_string(const std::string & s)
{
_imp->collection.reset(new KeywordNameSet);
- WhitespaceTokeniser::tokenise(s, create_inserter<KeywordName>(_imp->collection->inserter()));
+ tokenise_whitespace(s, create_inserter<KeywordName>(_imp->collection->inserter()));
}
FakeMetadataIUseSetKey::FakeMetadataIUseSetKey(const std::string & r,
@@ -114,7 +114,7 @@ FakeMetadataIUseSetKey::set_from_string(const std::string & s, const IUseFlagPar
{
_imp->collection.reset(new IUseFlagSet);
std::list<std::string> tokens;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
+ tokenise_whitespace(s, std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
_imp->collection->insert(IUseFlag(*t, m, std::string::npos));
diff --git a/paludis/repositories/unpackaged/dep_parser.cc b/paludis/repositories/unpackaged/dep_parser.cc
index b5fe2b3..8416d29 100644
--- a/paludis/repositories/unpackaged/dep_parser.cc
+++ b/paludis/repositories/unpackaged/dep_parser.cc
@@ -36,7 +36,7 @@ DepParser::parse(const std::string & s)
new ConstTreeSequence<DependencySpecTree, AllDepSpec>(tr1::shared_ptr<AllDepSpec>(new AllDepSpec)));
std::list<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, ",", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>(s, ",", "", std::back_inserter(tokens));
for (std::list<std::string>::const_iterator t(tokens.begin()), t_end(tokens.end()) ;
t != t_end ; ++t)
diff --git a/paludis/repositories/unpackaged/ndbam.cc b/paludis/repositories/unpackaged/ndbam.cc
index 0276422..2a2a738 100644
--- a/paludis/repositories/unpackaged/ndbam.cc
+++ b/paludis/repositories/unpackaged/ndbam.cc
@@ -299,7 +299,7 @@ NDBAM::entries(const QualifiedPackageName & q)
try
{
std::vector<std::string> tokens;
- Tokeniser<delim_kind::AnyOfTag>::tokenise(d->basename(), ":", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(d->basename(), ":", "", std::back_inserter(tokens));
if (tokens.size() < 3)
{
Log::get_instance()->message(ll_warning, lc_context) << "Not using '" << *d <<
diff --git a/paludis/repositories/unpackaged/ndbam_merger.cc b/paludis/repositories/unpackaged/ndbam_merger.cc
index 5674094..d0a2ac3 100644
--- a/paludis/repositories/unpackaged/ndbam_merger.cc
+++ b/paludis/repositories/unpackaged/ndbam_merger.cc
@@ -58,10 +58,8 @@ namespace paludis
options(o),
realroot(options.root.realpath())
{
- WhitespaceTokeniser::tokenise(o.config_protect,
- std::back_inserter(config_protect));
- WhitespaceTokeniser::tokenise(o.config_protect_mask,
- std::back_inserter(config_protect_mask));
+ tokenise_whitespace(o.config_protect, std::back_inserter(config_protect));
+ tokenise_whitespace(o.config_protect_mask, std::back_inserter(config_protect_mask));
}
};
}
diff --git a/paludis/repositories/unpackaged/ndbam_unmerger.cc b/paludis/repositories/unpackaged/ndbam_unmerger.cc
index 80f5982..5f49516 100644
--- a/paludis/repositories/unpackaged/ndbam_unmerger.cc
+++ b/paludis/repositories/unpackaged/ndbam_unmerger.cc
@@ -62,10 +62,8 @@ namespace paludis
Implementation(const NDBAMUnmergerOptions & o) :
options(o)
{
- WhitespaceTokeniser::tokenise(o.config_protect,
- std::back_inserter(config_protect));
- WhitespaceTokeniser::tokenise(o.config_protect_mask,
- std::back_inserter(config_protect_mask));
+ tokenise_whitespace(o.config_protect, std::back_inserter(config_protect));
+ tokenise_whitespace(o.config_protect_mask, std::back_inserter(config_protect_mask));
}
};
}
diff --git a/paludis/set_file.cc b/paludis/set_file.cc
index 366637e..476aaaa 100644
--- a/paludis/set_file.cc
+++ b/paludis/set_file.cc
@@ -155,7 +155,7 @@ namespace
bool operator() (const std::string & l) const
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(l, std::back_inserter(tokens));
+ tokenise_whitespace(l, std::back_inserter(tokens));
return (tokens.size() >= 1) && (tokens.at(1) == query);
}
@@ -176,7 +176,7 @@ namespace
try
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(line, std::back_inserter(tokens));
+ tokenise_whitespace(line, std::back_inserter(tokens));
if (tokens.empty())
return;
diff --git a/paludis/util/config_file.cc b/paludis/util/config_file.cc
index cd05552..0b11dcc 100644
--- a/paludis/util/config_file.cc
+++ b/paludis/util/config_file.cc
@@ -712,8 +712,8 @@ KeyValueConfigFile::_parse(const Source & ss, const KeyValueConfigFileOptions &
{
std::list<std::string> values;
std::set<std::string> new_values;
- WhitespaceTokeniser::tokenise(get(key), std::back_inserter(values));
- WhitespaceTokeniser::tokenise(value, std::back_inserter(values));
+ tokenise_whitespace(get(key), std::back_inserter(values));
+ tokenise_whitespace(value, std::back_inserter(values));
for (std::list<std::string>::const_iterator v(values.begin()), v_end(values.end()) ;
v != v_end ; ++v)
if (v->empty())
diff --git a/paludis/util/files.m4 b/paludis/util/files.m4
index 69aaba9..0ef5ed0 100644
--- a/paludis/util/files.m4
+++ b/paludis/util/files.m4
@@ -54,7 +54,7 @@ add(`strip', `hh', `cc', `test')
add(`system', `hh', `cc', `test', `testscript')
add(`thread', `hh', `cc', `test')
add(`thread_pool', `hh', `cc', `test')
-add(`tokeniser', `hh', `test')
+add(`tokeniser', `hh', `cc', `test')
add(`tr1_memory', `hh')
add(`tr1_type_traits', `hh')
add(`tr1_functional', `hh')
diff --git a/paludis/util/tokeniser.cc b/paludis/util/tokeniser.cc
new file mode 100644
index 0000000..96f45c1
--- /dev/null
+++ b/paludis/util/tokeniser.cc
@@ -0,0 +1,28 @@
+/* vim: set sw=4 sts=4 et foldmethod=syntax : */
+
+/*
+ * Copyright (c) 2007 Ciaran McCreesh
+ *
+ * This file is part of the Paludis package manager. Paludis is free software;
+ * you can redistribute it and/or modify it under the terms of the GNU General
+ * Public License version 2, as published by the Free Software Foundation.
+ *
+ * Paludis is distributed in the hope that it will be useful, but WITHOUT ANY
+ * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+ * details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+ * Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include <paludis/util/tokeniser.hh>
+
+using namespace paludis;
+
+TokeniserError::TokeniserError(const std::string & s, const std::string & msg) throw () :
+ Exception("When tokenising '" + s + "': " + msg)
+{
+}
+
diff --git a/paludis/util/tokeniser.hh b/paludis/util/tokeniser.hh
index 627d28e..6600d00 100644
--- a/paludis/util/tokeniser.hh
+++ b/paludis/util/tokeniser.hh
@@ -22,6 +22,8 @@
#include <iterator>
#include <paludis/util/instantiation_policy.hh>
+#include <paludis/util/exception.hh>
+#include <paludis/util/stringify.hh>
#include <string>
/** \file
@@ -85,7 +87,7 @@ namespace paludis
*
* \ingroup g_strings
*/
- template <typename DelimMode_, typename Char_, typename Iter_>
+ template <typename DelimMode_, typename Iter_>
struct Writer;
/**
@@ -94,13 +96,13 @@ namespace paludis
*
* \ingroup g_strings
*/
- template <typename Char_, typename Iter_>
- struct Writer<delim_mode::DelimiterTag, Char_, Iter_>
+ template <typename Iter_>
+ struct Writer<delim_mode::DelimiterTag, Iter_>
{
/**
* Handle a token.
*/
- static void handle_token(const std::basic_string<Char_> & s, Iter_ & i)
+ static void handle_token(const std::string & s, Iter_ & i)
{
*i++ = s;
}
@@ -108,7 +110,7 @@ namespace paludis
/**
* Handle a delimiter.
*/
- static void handle_delim(const std::basic_string<Char_> &, const Iter_ &)
+ static void handle_delim(const std::string &, const Iter_ &)
{
}
};
@@ -119,13 +121,13 @@ namespace paludis
*
* \ingroup g_strings
*/
- template <typename Char_, typename Iter_>
- struct Writer<delim_mode::BoundaryTag, Char_, Iter_>
+ template <typename Iter_>
+ struct Writer<delim_mode::BoundaryTag, Iter_>
{
/**
* Handle a token.
*/
- static void handle_token(const std::basic_string<Char_> & s, Iter_ & i)
+ static void handle_token(const std::string & s, Iter_ & i)
{
*i++ = s;
}
@@ -133,105 +135,265 @@ namespace paludis
/**
* Handle a delimiter.
*/
- static void handle_delim(const std::basic_string<Char_> & s, Iter_ & i)
+ static void handle_delim(const std::string & s, Iter_ & i)
{
*i++ = s;
}
};
- }
+ struct Lexer
+ {
+ const std::string text;
+ std::string::size_type text_pos;
+ std::string delims;
+ const std::string quotes;
- /**
- * Tokeniser splits up strings into smaller strings.
- *
- * \ingroup g_strings
- */
- template <typename DelimKind_, typename DelimMode_ = delim_mode::DelimiterTag,
- typename Char_ = std::string::value_type>
- struct Tokeniser;
+ std::string value;
+ enum { t_quote, t_delim, t_text } kind;
+
+ Lexer(const std::string & t, const std::string & d, const std::string & q) :
+ text(t),
+ text_pos(0),
+ delims(d),
+ quotes(q)
+ {
+ }
+
+ bool next()
+ {
+ if (text_pos >= text.length())
+ return false;
+
+ if (std::string::npos != delims.find(text[text_pos]))
+ {
+ std::string::size_type start_pos(text_pos);
+ while (++text_pos < text.length())
+ if (std::string::npos == delims.find(text[text_pos]))
+ break;
+
+ value = text.substr(start_pos, text_pos - start_pos);
+ kind = t_delim;
+ }
+ else if (std::string::npos != quotes.find(text[text_pos]))
+ {
+ value = std::string(1, text[text_pos]);
+ kind = t_quote;
+ ++text_pos;
+ }
+ else
+ {
+ std::string::size_type start_pos(text_pos);
+ while (++text_pos < text.length())
+ if (std::string::npos != delims.find(text[text_pos]))
+ break;
+ else if (std::string::npos != quotes.find(text[text_pos]))
+ break;
+ value = text.substr(start_pos, text_pos - start_pos);
+ kind = t_text;
+ }
+
+ return true;
+ }
+ };
+
+ template <typename DelimKind_, typename DelimMode_ = delim_mode::DelimiterTag>
+ struct Tokeniser;
+
+ template <typename DelimMode_>
+ class Tokeniser<delim_kind::AnyOfTag, DelimMode_>
+ {
+ private:
+ Tokeniser();
+
+ public:
+ template <typename Iter_>
+ static void tokenise(const std::string & s,
+ const std::string & delims,
+ const std::string & quotes,
+ Iter_ iter);
+ };
+ }
/**
- * Tokeniser: specialisation for delim_kind::AnyOfTag.
+ * Thrown if a Tokeniser encounters a syntax error (for example, mismatched quotes).
*
* \ingroup g_strings
- * \nosubgrouping
+ * \since 0.26
*/
- template <typename DelimMode_, typename Char_>
- class Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_>
+ class PALUDIS_VISIBLE TokeniserError :
+ public Exception
{
- private:
- Tokeniser();
-
public:
///\name Basic operations
///\{
- /**
- * Do the tokenisation.
- */
- template <typename Iter_>
- static void tokenise(const std::basic_string<Char_> & s,
- const std::basic_string<Char_> & delims, Iter_ iter);
+ TokeniserError(const std::string & s, const std::string & msg) throw ();
+
+ ///\}
};
- template <typename DelimMode_, typename Char_>
+ template <typename DelimMode_>
template <typename Iter_>
void
- Tokeniser<delim_kind::AnyOfTag, DelimMode_, Char_>::tokenise(
- const std::basic_string<Char_> & s, const std::basic_string<Char_> & delims, Iter_ iter)
+ tokeniser_internals::Tokeniser<delim_kind::AnyOfTag, DelimMode_>::tokenise(
+ const std::string & s,
+ const std::string & delims,
+ const std::string & quotes,
+ Iter_ iter)
{
- typename std::basic_string<Char_>::size_type p(0), old_p(0);
- bool in_delim((! s.empty()) && std::basic_string<Char_>::npos != delims.find(s[0]));
+ typedef tokeniser_internals::Lexer Lexer;
+ Lexer l(s, delims, quotes);
- for ( ; p < s.length() ; ++p)
+ enum { s_initial, s_had_quote, s_had_text, s_had_quote_text, s_had_quote_text_quote } state = s_initial;
+
+ while (l.next())
{
- if (in_delim)
+ switch (state)
{
- if (std::basic_string<Char_>::npos == delims.find(s[p]))
- {
- tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_delim(
- s.substr(old_p, p - old_p), iter);
- in_delim = false;
- old_p = p;
- }
- }
- else
- {
- if (std::basic_string<Char_>::npos != delims.find(s[p]))
- {
- tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_token(
- s.substr(old_p, p - old_p), iter);
- in_delim = true;
- old_p = p;
- }
+ case s_initial:
+ switch (l.kind)
+ {
+ case Lexer::t_quote:
+ state = s_had_quote;
+ l.delims = "";
+ break;
+
+ case Lexer::t_delim:
+ state = s_initial;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_delim(l.value, iter);
+ break;
+
+ case Lexer::t_text:
+ state = s_had_text;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_token(l.value, iter);
+ break;
+ }
+ break;
+
+ case s_had_quote:
+ switch (l.kind)
+ {
+ case Lexer::t_quote:
+ state = s_had_quote_text_quote;
+ l.delims = delims;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_token("", iter);
+ break;
+
+ case Lexer::t_delim:
+ throw InternalError(PALUDIS_HERE, "t_delim in s_had_quote");
+ break;
+
+ case Lexer::t_text:
+ state = s_had_quote_text;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_token(l.value, iter);
+ break;
+ }
+ break;
+
+ case s_had_quote_text:
+ switch (l.kind)
+ {
+ case Lexer::t_text:
+ throw InternalError(PALUDIS_HERE, "t_text in s_had_quote_text");
+ break;
+
+ case Lexer::t_delim:
+ throw InternalError(PALUDIS_HERE, "t_delim in s_had_quote_text");
+ break;
+
+ case Lexer::t_quote:
+ state = s_had_quote_text_quote;
+ l.delims = delims;
+ break;
+ }
+ break;
+
+ case s_had_quote_text_quote:
+ switch (l.kind)
+ {
+ case Lexer::t_text:
+ throw TokeniserError(s, "Close quote followed by text");
+ break;
+
+ case Lexer::t_quote:
+ throw TokeniserError(s, "Close quote followed by quote");
+ break;
+
+ case Lexer::t_delim:
+ state = s_initial;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_delim(l.value, iter);
+ break;
+ }
+ break;
+
+ case s_had_text:
+ switch (l.kind)
+ {
+ case Lexer::t_text:
+ throw InternalError(PALUDIS_HERE, "t_text in s_had_text");
+ break;
+
+ case Lexer::t_quote:
+ throw TokeniserError(s, "Text followed by quote");
+ break;
+
+ case Lexer::t_delim:
+ state = s_initial;
+ tokeniser_internals::Writer<DelimMode_, Iter_>::handle_delim(l.value, iter);
+ break;
+ }
+ break;
}
}
- if (old_p != p)
+ switch (state)
{
- if (in_delim)
- tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_delim(
- s.substr(old_p, p - old_p), iter);
- else
- tokeniser_internals::Writer<DelimMode_, Char_, Iter_>::handle_token(
- s.substr(old_p, p - old_p), iter);
+ case s_initial:
+ case s_had_text:
+ case s_had_quote_text_quote:
+ return;
+
+ case s_had_quote:
+ case s_had_quote_text:
+ throw TokeniserError(s, "Unterminated quoted string");
}
}
/**
- * Convenience class for tokenising on whitespace.
+ * Tokenise a string, splitting on any of the characters in delims and treating quoted substrings (quotes) as single tokens.
*
* \ingroup g_strings
+ * \since 0.26
*/
- class PALUDIS_VISIBLE WhitespaceTokeniser
+ template <typename DelimKind_, typename DelimMode_, typename Iter_>
+ void tokenise(const std::string & s, const std::string & delims, const std::string & quotes, Iter_ iter)
{
- public:
- template <typename Iter_>
- static void tokenise(const std::string & s, Iter_ iter)
- {
- Tokeniser<delim_kind::AnyOfTag>::tokenise(s, " \t\r\n", iter);
- }
- };
+ tokeniser_internals::Tokeniser<DelimKind_, DelimMode_>::template tokenise<Iter_>(s, delims, quotes, iter);
+ }
+
+ /**
+ * Convenience function: tokenise on whitespace.
+ *
+ * \ingroup g_strings
+ * \since 0.26
+ */
+ template <typename Iter_>
+ void tokenise_whitespace(const std::string & s, Iter_ iter)
+ {
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, " \t\r\n", "", iter);
+ }
+
+ /**
+ * Convenience function: tokenise on whitespace, treating single- or double-quoted substrings as single tokens; throws TokeniserError on mismatched quotes.
+ *
+ * \ingroup g_strings
+ * \since 0.26
+ */
+ template <typename Iter_>
+ void tokenise_whitespace_quoted(const std::string &s, Iter_ iter)
+ {
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(s, " \t\r\n", "'\"", iter);
+ }
}
#endif
diff --git a/paludis/util/tokeniser_TEST.cc b/paludis/util/tokeniser_TEST.cc
index 3ceb1ac..37514d4 100644
--- a/paludis/util/tokeniser_TEST.cc
+++ b/paludis/util/tokeniser_TEST.cc
@@ -19,6 +19,7 @@
#include <iterator>
#include <paludis/util/tokeniser.hh>
+#include <paludis/util/join.hh>
#include <test/test_framework.hh>
#include <test/test_runner.hh>
#include <vector>
@@ -26,30 +27,20 @@
using namespace test;
using namespace paludis;
-/** \file
- * Test cases for tokeniser.hh .
- *
- */
-
namespace test_cases
{
- /**
- * \test Test Tokeniser<AnyOfTag, DelimiterTag>
- *
- */
struct TestTokeniserAD : TestCase
{
TestTokeniserAD() : TestCase("Tokeniser<AnyOfTag, DelimiterTag(default)>") { }
void run()
{
- typedef Tokeniser<delim_kind::AnyOfTag> t;
const std::string delims(",.+");
std::vector<std::string> tokens;
TEST_CHECK(tokens.empty());
- t::tokenise("one,two...+...three...", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>("one,two...+...three...", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(3));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), "two");
@@ -57,7 +48,7 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("...one,two...+...three", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>("...one,two...+...three", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(3));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), "two");
@@ -65,40 +56,35 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("one", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>("one", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), "one");
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise(".+.,.", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(".+.,.", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>("", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
}
} test_tokeniser_ad;
- /**
- * \test Test Tokeniser<AnyOfTag, BoundaryTag>
- *
- */
struct TestTokeniserAB : TestCase
{
TestTokeniserAB() : TestCase("Tokeniser<AnyOfTag, BoundaryTag>") { }
void run()
{
- typedef Tokeniser<delim_kind::AnyOfTag, delim_mode::BoundaryTag> t;
const std::string delims(",.+");
std::vector<std::string> tokens;
TEST_CHECK(tokens.empty());
- t::tokenise("one,two...+...three...", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>("one,two...+...three...", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(6));
TEST_CHECK_EQUAL(tokens.at(0), "one");
TEST_CHECK_EQUAL(tokens.at(1), ",");
@@ -109,7 +95,7 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("...one,two...+...three", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>("...one,two...+...three", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(6));
TEST_CHECK_EQUAL(tokens.at(0), "...");
TEST_CHECK_EQUAL(tokens.at(1), "one");
@@ -120,21 +106,49 @@ namespace test_cases
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("one", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>("one", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), "one");
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise(".+.,.", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>(".+.,.", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(1));
TEST_CHECK_EQUAL(tokens.at(0), ".+.,.");
tokens.clear();
TEST_CHECK(tokens.empty());
- t::tokenise("", delims, std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::BoundaryTag>("", delims, "", std::back_inserter(tokens));
TEST_CHECK_EQUAL(tokens.size(), std::size_t(0));
tokens.clear();
}
} test_tokeniser_ab;
+
+ struct QuotedWhitespaceTokeniserTest : TestCase
+ {
+ QuotedWhitespaceTokeniserTest() : TestCase("quoted whitespace tokeniser") { }
+
+ void run()
+ {
+ std::vector<std::string> v1;
+ tokenise_whitespace_quoted("one \"two three\" four 'five six' seven '' eight", std::back_inserter(v1));
+ TestMessageSuffix s(join(v1.begin(), v1.end(), "#"));
+ TEST_CHECK_EQUAL(v1.size(), 7u);
+ TEST_CHECK_EQUAL(v1.at(0), "one");
+ TEST_CHECK_EQUAL(v1.at(1), "two three");
+ TEST_CHECK_EQUAL(v1.at(2), "four");
+ TEST_CHECK_EQUAL(v1.at(3), "five six");
+ TEST_CHECK_EQUAL(v1.at(4), "seven");
+ TEST_CHECK_EQUAL(v1.at(5), "");
+ TEST_CHECK_EQUAL(v1.at(6), "eight");
+
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one \"two three", std::back_inserter(v1)), TokeniserError);
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one tw\"o\" three", std::back_inserter(v1)), TokeniserError);
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one tw\"o", std::back_inserter(v1)), TokeniserError);
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one tw\"o\"three", std::back_inserter(v1)), TokeniserError);
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one \"two\"three\"", std::back_inserter(v1)), TokeniserError);
+ TEST_CHECK_THROWS(tokenise_whitespace_quoted("one \"two\"\"three\"", std::back_inserter(v1)), TokeniserError);
+ }
+ } test_quoted_whitespace_tokeniser;
}
+
diff --git a/src/clients/adjutrix/downgrade_check.cc b/src/clients/adjutrix/downgrade_check.cc
index 6c01db7..56a3540 100644
--- a/src/clients/adjutrix/downgrade_check.cc
+++ b/src/clients/adjutrix/downgrade_check.cc
@@ -109,7 +109,7 @@ namespace
while (std::getline(f, s))
{
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(tokens));
+ tokenise_whitespace(s, std::back_inserter(tokens));
if (tokens.size() != 3)
throw ConfigurationError("Bad line '" + s + "'");
diff --git a/src/clients/contrarius/stage.cc b/src/clients/contrarius/stage.cc
index 61b3b3d..5467ea9 100644
--- a/src/clients/contrarius/stage.cc
+++ b/src/clients/contrarius/stage.cc
@@ -45,7 +45,7 @@ bool
AuxiliaryStage::is_rebuild() const
{
std::list<std::string> packages;
- WhitespaceTokeniser::tokenise(TargetConfig::get_instance()->aux(), std::back_inserter(packages));
+ tokenise_whitespace(TargetConfig::get_instance()->aux(), std::back_inserter(packages));
for (std::list<std::string>::const_iterator p(packages.begin()), p_end(packages.end()) ;
p != p_end ; ++p)
diff --git a/src/clients/contrarius/target_config.cc b/src/clients/contrarius/target_config.cc
index 910fbe5..7bd38c0 100644
--- a/src/clients/contrarius/target_config.cc
+++ b/src/clients/contrarius/target_config.cc
@@ -47,7 +47,7 @@ TargetConfig::_parse_defaults()
{
Context c2("While parsing line '" + *l + "'");
std::vector<std::string> tokens;
- WhitespaceTokeniser::tokenise(*l, std::back_inserter(tokens));
+ tokenise_whitespace(*l, std::back_inserter(tokens));
SpecEntryList * list(&_binutils_list);
if ((("aux" == tokens[1]) || "headers" == tokens[1]) && (2 == std::distance(tokens.begin(), tokens.end())))
@@ -86,7 +86,7 @@ TargetConfig::_find_match(SpecEntryList & list)
i != i_end ; ++i)
{
tokens.clear();
- Tokeniser<delim_kind::AnyOfTag>::tokenise(i->first, "-", std::back_inserter(tokens));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(i->first, "-", "", std::back_inserter(tokens));
for (unsigned index(0) ; index < 4 ; ++index)
{
diff --git a/src/clients/reconcilio/broken_linkage_finder/configuration.cc b/src/clients/reconcilio/broken_linkage_finder/configuration.cc
index da4189c..a14805e 100644
--- a/src/clients/reconcilio/broken_linkage_finder/configuration.cc
+++ b/src/clients/reconcilio/broken_linkage_finder/configuration.cc
@@ -73,16 +73,16 @@ namespace
}
};
- template <typename Tokeniser_, typename T_>
+ template <typename T_>
void
- from_string(const tr1::function<std::string (const std::string &)> & source,
- const std::string & varname, std::vector<T_> & vec, const std::string & delims)
+ from_colon_string(const tr1::function<std::string (const std::string &)> & source,
+ const std::string & varname, std::vector<T_> & vec)
{
std::string str(source.operator() (varname)); /* silly 4.3 ICE */
if (! str.empty())
{
Log::get_instance()->message(ll_debug, lc_context, "Got " + varname + "=\"" + str + "\"");
- Tokeniser_::tokenise(str, delims, std::back_inserter(vec));
+ tokenise<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(str, ":", "", std::back_inserter(vec));
}
}
@@ -95,7 +95,7 @@ namespace
if (! str.empty())
{
Log::get_instance()->message(ll_debug, lc_context, "Got " + varname + "=\"" + str + "\"");
- WhitespaceTokeniser::tokenise(str, std::back_inserter(vec));
+ tokenise_whitespace(str, std::back_inserter(vec));
}
}
@@ -238,14 +238,12 @@ Implementation<Configuration>::load_from_etc_profile_env(const FSEntry & root)
opts += kvcfo_ignore_export;
KeyValueConfigFile kvs(etc_profile_env, opts);
- typedef Tokeniser<delim_kind::AnyOfTag> Tokeniser;
- const std::string delims(":");
tr1::function<std::string (const std::string &)> fromfile(
tr1::bind(&KeyValueConfigFile::get, tr1::cref(kvs), _1));
- from_string<Tokeniser>(fromfile, "PATH", search_dirs, delims);
- from_string<Tokeniser>(fromfile, "ROOTPATH", search_dirs, delims);
+ from_colon_string(fromfile, "PATH", search_dirs);
+ from_colon_string(fromfile, "ROOTPATH", search_dirs);
}
else if (etc_profile_env.exists())
Log::get_instance()->message(ll_warning, lc_context, "'" + stringify(etc_profile_env) + "' exists but is not a regular file");
@@ -288,19 +286,19 @@ Implementation<Configuration>::add_defaults()
static const std::string default_ld_so_conf("/lib /usr/lib");
Log::get_instance()->message(ll_debug, lc_context, "Got LD_LIBRARY_MASK=\"" + default_ld_library_mask + "\"");
- WhitespaceTokeniser::tokenise(
+ tokenise_whitespace(
default_ld_library_mask, std::back_inserter(ld_library_mask));
Log::get_instance()->message(ll_debug, lc_context, "Got SEARCH_DIRS=\"" + default_search_dirs + "\"");
- WhitespaceTokeniser::tokenise(
+ tokenise_whitespace(
default_search_dirs, std::back_inserter(search_dirs));
Log::get_instance()->message(ll_debug, lc_context, "Got SEARCH_DIRS_MASK=\"" + default_search_dirs_mask + "\"");
- WhitespaceTokeniser::tokenise(
+ tokenise_whitespace(
default_search_dirs_mask, std::back_inserter(search_dirs_mask));
Log::get_instance()->message(ll_debug, lc_context, "Default ld.so.conf contents is \"" + default_ld_so_conf + "\"");
- WhitespaceTokeniser::tokenise(
+ tokenise_whitespace(
default_ld_so_conf, std::back_inserter(ld_so_conf));
}
diff --git a/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc b/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
index 6561525..d5ce47b 100644
--- a/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
+++ b/src/clients/reconcilio/broken_linkage_finder/libtool_linkage_checker.cc
@@ -102,8 +102,7 @@ LibtoolLinkageChecker::check_file(const FSEntry & file)
try
{
KeyValueConfigFile kvs(stream, opts);
- WhitespaceTokeniser::tokenise(
- kvs.get("dependency_libs"), std::back_inserter(deps));
+ tokenise_whitespace(kvs.get("dependency_libs"), std::back_inserter(deps));
}
catch (const ConfigFileError & ex)
{
diff --git a/src/output/console_query_task.cc b/src/output/console_query_task.cc
index f11f69e..b4a01bc 100644
--- a/src/output/console_query_task.cc
+++ b/src/output/console_query_task.cc
@@ -660,7 +660,7 @@ namespace
std::string normalise(const std::string & s)
{
std::list<std::string> w;
- WhitespaceTokeniser::tokenise(s, std::back_inserter(w));
+ tokenise_whitespace(s, std::back_inserter(w));
return join(w.begin(), w.end(), " ");
}
}