about · summary · refs · log · tree · commit · diff
diff options
context:
space:
mode:
authorAvatar Ciaran McCreesh <ciaran.mccreesh@googlemail.com> 2006-05-13 12:27:58 +0000
committerAvatar Ciaran McCreesh <ciaran.mccreesh@googlemail.com> 2006-05-13 12:27:58 +0000
commit   860c46d1fc12072cf6b422289f45f143ee5ff105 (patch)
tree     1cafdf6902d5042dfbe35b54cc3ff10b39ed400d
parent   4c6642465a4950796da1e63527f0b919ac896ec7 (diff)
download paludis-860c46d1fc12072cf6b422289f45f143ee5ff105.tar.gz
         paludis-860c46d1fc12072cf6b422289f45f143ee5ff105.tar.xz
Generalise WhitespaceTokeniser to include newlines. Switch to using WhitespaceTokeniser throughout.
-rw-r--r--  paludis/config_file.cc          3
-rw-r--r--  paludis/default_config.cc       8
-rw-r--r--  paludis/default_environment.cc  3
-rw-r--r--  paludis/portage_repository.cc   24
-rw-r--r--  paludis/util/tokeniser.cc       2
-rw-r--r--  paludis/vdb_repository.cc       3
6 files changed, 16 insertions, 27 deletions
diff --git a/paludis/config_file.cc b/paludis/config_file.cc
index 9f4850d..4548347 100644
--- a/paludis/config_file.cc
+++ b/paludis/config_file.cc
@@ -315,8 +315,7 @@ AdvisoryLine::AdvisoryLine(const std::string & s) :
_line(s),
_is_range(false)
{
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t");
- tokeniser.tokenise(s, std::back_inserter(_tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(s, std::back_inserter(_tokens));
if ((_tokens.size() < 1) || (_tokens.size() > 2))
throw AdvisoryFileError("Wrong count of atoms on line.");
diff --git a/paludis/default_config.cc b/paludis/default_config.cc
index 18e9641..2afdb88 100644
--- a/paludis/default_config.cc
+++ b/paludis/default_config.cc
@@ -57,8 +57,6 @@ DefaultConfig::DefaultConfig() :
if (! getenv_with_default("PALUDIS_SKIP_CONFIG", "").empty())
return;
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
-
/* indirection */
std::string root_prefix;
std::string config_suffix;
@@ -161,7 +159,7 @@ DefaultConfig::DefaultConfig() :
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
if ("*" == tokens.at(0))
@@ -200,7 +198,7 @@ DefaultConfig::DefaultConfig() :
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
if ("*" == tokens.at(0))
@@ -285,7 +283,7 @@ DefaultConfig::DefaultConfig() :
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/default_environment.cc b/paludis/default_environment.cc
index 07e4fe8..5217472 100644
--- a/paludis/default_environment.cc
+++ b/paludis/default_environment.cc
@@ -438,7 +438,6 @@ DefaultEnvironment::local_package_set(const std::string & s) const
{
Context context("When looking for package set '" + s + "' in default environment:");
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
FSEntry ff(FSEntry(DefaultConfig::get_instance()->config_dir()) / "sets" / (s + ".conf"));
if (ff.exists())
{
@@ -450,7 +449,7 @@ DefaultEnvironment::local_package_set(const std::string & s) const
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/portage_repository.cc b/paludis/portage_repository.cc
index 758be69..582eb55 100644
--- a/paludis/portage_repository.cc
+++ b/paludis/portage_repository.cc
@@ -277,9 +277,8 @@ Implementation<PortageRepository>::add_profile(const FSEntry & f) const
for (UseFlagSet::const_iterator x(expand_list.begin()), x_end(expand_list.end()) ;
x != x_end ; ++x)
{
- static Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
std::list<std::string> uses;
- tokeniser.tokenise(profile_env[stringify(*x)], std::back_inserter(uses));
+ WhitespaceTokeniser::get_instance()->tokenise(profile_env[stringify(*x)], std::back_inserter(uses));
for (std::list<std::string>::const_iterator u(uses.begin()), u_end(uses.end()) ;
u != u_end ; ++u)
{
@@ -301,8 +300,6 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
{
Context context("When reading profile directory '" + stringify(f) + "':");
- static Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
-
if (! f.is_directory())
{
Log::get_instance()->message(ll_warning, "Profile component '" + stringify(f) +
@@ -337,7 +334,7 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
KeyValueConfigFile make_defaults_f(f / "make.defaults");
std::deque<std::string> uses;
- tokeniser.tokenise(make_defaults_f.get("USE"), std::back_inserter(uses));
+ WhitespaceTokeniser::get_instance()->tokenise(make_defaults_f.get("USE"), std::back_inserter(uses));
for (std::deque<std::string>::const_iterator u(uses.begin()), u_end(uses.end()) ;
u != u_end ; ++u)
{
@@ -347,7 +344,8 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
use[UseFlagName(*u)] = use_enabled;
}
- tokeniser.tokenise(make_defaults_f.get("USE_EXPAND"), create_inserter<UseFlagName>(
+ WhitespaceTokeniser::get_instance()->tokenise(
+ make_defaults_f.get("USE_EXPAND"), create_inserter<UseFlagName>(
std::inserter(expand_list, expand_list.begin())));
for (KeyValueConfigFile::Iterator k(make_defaults_f.begin()),
@@ -377,7 +375,7 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
line != line_end; ++line)
{
std::deque<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -420,7 +418,7 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
line != line_end; ++line)
{
std::deque<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
@@ -448,7 +446,7 @@ Implementation<PortageRepository>::add_profile_r(const FSEntry & f) const
line != line_end ; ++line)
{
std::deque<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.size() < 2)
continue;
virtuals_map.erase(QualifiedPackageName(tokens[0]));
@@ -1258,15 +1256,13 @@ PortageRepository::do_is_mirror(const std::string & s) const
{
if (! _imp->has_mirrors)
{
- static Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
-
if ((_imp->location / "profiles" / "thirdpartymirrors").exists())
{
LineConfigFile mirrors(_imp->location / "profiles" / "thirdpartymirrors");
for (LineConfigFile::Iterator line(mirrors.begin()) ; line != mirrors.end() ; ++line)
{
std::vector<std::string> entries;
- tokeniser.tokenise(*line, std::back_inserter(entries));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(entries));
if (! entries.empty())
{
/* pick up to five random mirrors only */
@@ -1690,13 +1686,11 @@ PortageRepository::do_package_set(const std::string & s) const
AllDepAtom::Pointer result(new AllDepAtom);
LineConfigFile f(ff);
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> tokeniser(" \t\n");
-
for (LineConfigFile::Iterator line(f.begin()), line_end(f.end()) ;
line != line_end ; ++line)
{
std::vector<std::string> tokens;
- tokeniser.tokenise(*line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(*line, std::back_inserter(tokens));
if (tokens.empty())
continue;
diff --git a/paludis/util/tokeniser.cc b/paludis/util/tokeniser.cc
index 8d5fbbe..1886200 100644
--- a/paludis/util/tokeniser.cc
+++ b/paludis/util/tokeniser.cc
@@ -22,7 +22,7 @@
using namespace paludis;
WhitespaceTokeniser::WhitespaceTokeniser() :
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(" \t\n")
+ Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag>(" \t\r\n")
{
}
diff --git a/paludis/vdb_repository.cc b/paludis/vdb_repository.cc
index bb1a307..a4f3601 100644
--- a/paludis/vdb_repository.cc
+++ b/paludis/vdb_repository.cc
@@ -536,13 +536,12 @@ VDBRepository::do_contents(
std::string line;
unsigned line_number(0);
- Tokeniser<delim_kind::AnyOfTag, delim_mode::DelimiterTag> t(" \t\n");
while (std::getline(ff, line))
{
++line_number;
std::vector<std::string> tokens;
- t.tokenise(line, std::back_inserter(tokens));
+ WhitespaceTokeniser::get_instance()->tokenise(line, std::back_inserter(tokens));
if (tokens.empty())
continue;