ENH: improve consistency of adding tokens to ITstream
- use add_tokens() instead of the old multi-parameter append(.., bool)
  method, which was misleading since it added tokens at the current
  tokenIndex, not at the end.
- stringify ITstream contents with CharStream instead of StringStream.
  Allows string_view for copying out the content.

ENH: set namedDictionary dictionary name from Istream

- provides context for error messages etc (#2990)
parent fb26fcedfc
commit c1e2fd6726
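For context, a minimal before/after sketch of the caller-side change, assuming OpenFOAM's ITstream.H and the add_tokens() overloads shown in the diff below (variable names are illustrative only):

#include "ITstream.H"

void composeTokens(Foam::ITstream& its, Foam::token tok, Foam::List<Foam::token> more)
{
    // Old spelling: the bool only controlled lazy resizing, and despite the
    // name the token was written at the current tokenIndex, not at the end
    //// its.append(std::move(tok), true);

    // New spelling: intent is explicit, capacity handling is internal
    its.add_tokens(std::move(tok));     // single token, at tokenIndex
    its.add_tokens(std::move(more));    // list of tokens, at tokenIndex
}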
@@ -29,7 +29,7 @@ License
#include "error.H"
#include "ITstream.H"
#include "SpanStream.H"
#include "StringStream.H"
#include <algorithm>
#include <memory>

// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
@@ -132,35 +132,25 @@ Foam::tokenList Foam::ITstream::parse

// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //

void Foam::ITstream::reserveCapacity
(
    const label nElem,
    const bool lazy
)
void Foam::ITstream::reserveCapacity(const label newCapacity)
{
    if (lazy)
    // Reserve - leave excess capacity for further appends

    label len = tokenList::size();

    if (len < newCapacity)
    {
        // Reserve - leave excess capacity for further appends
        // Min-size (16) when starting from zero
        if (!len) len = 8;

        label n = tokenList::size();

        if (nElem > n)
        // Increase capacity. Strict doubling
        do
        {
            if (!n) n = 1;  // Avoid dead-lock when starting from zero-sized

            do
            {
                n *= 2;
            }
            while (nElem >= n);

            tokenList::resize(n);
            len *= 2;
        }
    }
    else
    {
        // Strict capacity
        tokenList::resize(nElem);
        while (len < newCapacity);

        tokenList::resize(len);
    }
}
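Since the removed and added lines of this hunk are shown interleaved above, here is a standalone sketch of the grow-by-doubling reserve strategy that the new reserveCapacity(newCapacity) uses, written against std::vector<int> rather than Foam::tokenList so it compiles in isolation (function and variable names are illustrative only):

#include <vector>

// Grow the backing storage to at least newCapacity, doubling from the
// current size (minimum seed of 8) and leaving the excess for later appends.
void reserve_doubling(std::vector<int>& list, std::size_t newCapacity)
{
    std::size_t len = list.size();

    if (len < newCapacity)
    {
        if (!len) len = 8;          // seed value when starting from empty

        do { len *= 2; } while (len < newCapacity);

        list.resize(len);           // resize (not reserve): the stream indexes directly
    }
}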
@@ -277,7 +267,7 @@ Foam::ITstream::ITstream
:
    ITstream(streamOpt, name)
{
    ISpanStream is(input.data(), input.length(), streamOpt);
    ISpanStream is(input, streamOpt);

    parseStream(is, static_cast<tokenList&>(*this));
    ITstream::seek(0); // rewind(), but bypasss virtual
@@ -329,26 +319,39 @@ void Foam::ITstream::print(Ostream& os) const

std::string Foam::ITstream::toString() const
{
    const tokenList& toks = *this;
    const label nToks = toks.size();

    if (nToks == 1 && toks.front().isStringType())
    if (tokenList::empty())
    {
        return std::string();
    }
    else if (tokenList::size() == 1 && tokenList::front().isStringType())
    {
        // Already a string-type (WORD, STRING, ...). Just copy.
        return toks.front().stringToken();
        return tokenList::front().stringToken();
    }

    OStringStream buf;
    // Stringify
    OCharStream buf;
    buf.precision(16);  // Some reasonably high precision
    bool addSpace = false;  // Separate from previous token with a space
    for (const token& tok : toks)

    auto iter = tokenList::cbegin();
    const auto last = tokenList::cend();

    // Note: could also just use the buffer token-wise

    // Contents - space separated
    if (iter != last)
    {
        if (addSpace) buf << token::SPACE;
        addSpace = true;
        buf << tok;
        buf << *iter;

        for (++iter; (iter != last); (void)++iter)
        {
            buf << token::SPACE << *iter;
        }
    }

    return buf.str();
    const auto view = buf.view();

    return std::string(view.data(), view.size());
}
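The switch from OStringStream to OCharStream above is what allows the result to be copied out once through a view instead of via an intermediate string. A minimal standalone sketch of the same pattern, using the standard library in place of the OpenFOAM stream classes (names are illustrative only):

#include <sstream>
#include <string>
#include <string_view>
#include <vector>

// Space-separated stringification of a token list; the contents are
// copied out once, via a view onto the formatting buffer.
std::string join_tokens(const std::vector<std::string>& toks)
{
    if (toks.empty()) return std::string();
    if (toks.size() == 1) return toks.front();   // already a single string

    std::ostringstream buf;

    auto iter = toks.cbegin();
    buf << *iter;
    for (++iter; iter != toks.cend(); ++iter)
    {
        buf << ' ' << *iter;
    }

    const std::string_view view = buf.view();    // C++20: no extra buffer copy
    return std::string(view.data(), view.size());
}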
@@ -733,51 +736,42 @@ void Foam::ITstream::rewind()
}


void Foam::ITstream::push_back(const token& t, const bool lazy)
void Foam::ITstream::add_tokens(const token& tok)
{
    reserveCapacity(tokenIndex_ + 1, lazy);
    tokenList& toks = *this;
    reserveCapacity(tokenIndex_ + 1);

    toks[tokenIndex_] = t;  // copy append
    tokenList::operator[](tokenIndex_) = tok;
    ++tokenIndex_;
}


void Foam::ITstream::push_back(token&& t, const bool lazy)
void Foam::ITstream::add_tokens(token&& tok)
{
    reserveCapacity(tokenIndex_ + 1, lazy);
    tokenList& toks = *this;
    reserveCapacity(tokenIndex_ + 1);

    toks[tokenIndex_] = std::move(t);  // move append
    tokenList::operator[](tokenIndex_) = std::move(tok);
    ++tokenIndex_;
}


void Foam::ITstream::push_back(const UList<token>& newTokens, const bool lazy)
void Foam::ITstream::add_tokens(const UList<token>& toks)
{
    reserveCapacity(tokenIndex_ + newTokens.size(), lazy);
    tokenList& toks = *this;
    const label len = toks.size();
    reserveCapacity(tokenIndex_ + len);

    for (const token& t : newTokens)
    {
        toks[tokenIndex_] = t;  // copy append
        ++tokenIndex_;
    }
    std::copy_n(toks.begin(), len, tokenList::begin(tokenIndex_));
    tokenIndex_ += len;
}


void Foam::ITstream::push_back(List<token>&& newTokens, const bool lazy)
void Foam::ITstream::add_tokens(List<token>&& toks)
{
    reserveCapacity(tokenIndex_ + newTokens.size(), lazy);
    tokenList& toks = *this;
    const label len = toks.size();
    reserveCapacity(tokenIndex_ + len);

    for (token& t : newTokens)
    {
        toks[tokenIndex_] = std::move(t);  // move append
        ++tokenIndex_;
    }

    newTokens.clear();
    std::move(toks.begin(), toks.end(), tokenList::begin(tokenIndex_));
    tokenIndex_ += len;
    toks.clear();
}
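Unlike a plain container append, add_tokens() writes at the current tokenIndex_ and then advances it, overwriting whatever followed. A compilable sketch of that index-based bulk write, using std::vector in place of tokenList (the struct and member names are made up for illustration):

#include <algorithm>
#include <vector>

struct TokenBuffer
{
    std::vector<int> tokens;     // stand-in for tokenList
    std::size_t tokenIndex = 0;  // next write/read position

    // Copy a block of tokens in at tokenIndex, growing the storage if needed
    void add_tokens(const std::vector<int>& src)
    {
        const std::size_t len = src.size();

        if (tokens.size() < tokenIndex + len)
        {
            tokens.resize(tokenIndex + len);
        }

        std::copy_n(src.begin(), len, tokens.begin() + tokenIndex);
        tokenIndex += len;
    }
};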
@@ -30,6 +30,10 @@ Class
Description
    An input stream of tokens.

    Although ITstream is principally meant to be used as a read-only
    input stream, it also provides additional methods to help when
    composing its contents (eg, when parsing).

SourceFiles
    ITstream.C
@@ -69,11 +73,9 @@ class ITstream
        //- An ad hoc combination of reserve and setCapacity somewhat
        //- similar to DynamicList.
        //
        //  In lazy mode, increase list size if needed, but leave any
        //  excess capacity - works like reserve.
        //
        //  In non-lazy mode, set exact capacity
        void reserveCapacity(const label nElem, const bool lazy);
        //  Increase list size if needed,
        //  but leave any excess capacity (ie, like reserve).
        void reserveCapacity(const label newCapacity);

        //- Failsafe read-access to token at specified location
        //- or undefinedToken
@@ -345,11 +347,11 @@ public:

        //- Copy append a token at the current tokenIndex,
        //- incrementing the index.
        void push_back(const token& t, const bool lazy);
        void add_tokens(const token& tok);

        //- Move append a token at the current tokenIndex,
        //- incrementing the index.
        void push_back(token&& t, const bool lazy);
        void add_tokens(token&& tok);

        //- Copy append a list of tokens at the current tokenIndex,
        //- incrementing the index.
@@ -357,7 +359,7 @@ public:
        //  \param newTokens the list of tokens to copy append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
        void push_back(const UList<token>& newTokens, const bool lazy);
        void add_tokens(const UList<token>& toks);

        //- Move append a list of tokens at the current tokenIndex,
        //- incrementing the index.
@@ -365,7 +367,7 @@ public:
        //  \param newTokens the list of tokens to move append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
        void push_back(List<token>&& newTokens, const bool lazy);
        void add_tokens(List<token>&& toks);


    // Stream State Functions
@@ -479,43 +481,32 @@ public:
    // Housekeeping

        //- Same as front()
        FOAM_DEPRECATED_STRICT(2022-11, "front()")
        const token& peekFirst() const { return front(); }

        //- Copy append a token at the current tokenIndex,
        //- incrementing the index.
        void append(const token& t, const bool lazy)
        {
            this->push_back(std::move(t), lazy);
        }
        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void append(const token& t, bool) { add_tokens(t); }

        //- Move append a token at the current tokenIndex,
        //- incrementing the index.
        void append(token&& t, const bool lazy)
        {
            this->push_back(std::move(t), lazy);
        }
        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void append(token&& t, bool) { add_tokens(std::move(t)); }

        //- Copy append a list of tokens at the current tokenIndex,
        //- incrementing the index.
        //
        //  \param newTokens the list of tokens to copy append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
        void append(const UList<token>& newTokens, const bool lazy)
        {
            this->push_back(newTokens, lazy);
        }
        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void append(const UList<token>& t, bool) { add_tokens(t); }

        //- Move append a list of tokens at the current tokenIndex,
        //- incrementing the index.
        //
        //  \param newTokens the list of tokens to move append
        //  \param lazy leaves any excess capacity for further appends.
        //      The caller will be responsible for resizing later.
        void append(List<token>&& newTokens, const bool lazy)
        {
            this->push_back(std::move(newTokens), lazy);
        }
        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void append(List<token>&& t, bool) { add_tokens(std::move(t)); }

        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void push_back(const token& t, bool) { add_tokens(t); }

        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void push_back(token&& t, bool) { add_tokens(std::move(t)); }

        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void push_back(const UList<token>& t, bool) { add_tokens(t); }

        FOAM_DEPRECATED_STRICT(2023-10, "add_tokens()")
        void push_back(List<token>&& t, bool) { add_tokens(std::move(t)); }
};
@@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2019-2021 OpenCFD Ltd.
    Copyright (C) 2019-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -255,7 +255,7 @@ bool Foam::functionEntries::evalEntry::execute
{
    tokenList toks(evaluate(parentDict, is));

    entry.append(std::move(toks), true);  // Lazy resizing
    entry.add_tokens(std::move(toks));    // Add at tokenIndex

    return true;
}
@@ -272,7 +272,7 @@ bool Foam::functionEntries::evalEntry::execute
{
    tokenList toks(evaluate(parentDict, inputExpr, fieldWidth, is));

    entry.append(std::move(toks), true);  // Lazy resizing
    entry.add_tokens(std::move(toks));    // Add at tokenIndex

    return true;
}
@@ -5,7 +5,7 @@
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2021 OpenCFD Ltd.
    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -129,7 +129,7 @@ bool Foam::functionEntries::wordDirective::execute
    if (tok.good())
    {
        // Can add evaluated value directly into primitiveEntry
        entry.append(std::move(tok), true);  // Lazy resizing
        entry.add_tokens(std::move(tok));    // Add at tokenIndex
    }

    return true;
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2020 OpenFOAM Foundation
    Copyright (C) 2021 OpenCFD Ltd.
    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -83,6 +83,8 @@ Foam::Istream& Foam::operator>>(Istream& is, namedDictionary& obj)
    if (tok.isPunctuation(token::BEGIN_BLOCK))
    {
        obj.dict().read(is);
        // Provide a name as context for error messages etc.
        obj.dict().name() = is.relativeName();
    }

    is.check(FUNCTION_NAME);
@@ -6,7 +6,7 @@
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2020 OpenFOAM Foundation
    Copyright (C) 2021-2022 OpenCFD Ltd.
    Copyright (C) 2021-2023 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.
@@ -40,8 +40,9 @@ Description
    );
    \endverbatim

    In can also be used in situations where an individual dictionary entry
    should be read.
    The namedDictionary can also be used in situations where an individual
    dictionary entry should be read. The keyword() and dict() methods
    correspond to Foam::entry naming conventions.
    \verbatim
    actions
    (
@@ -113,28 +114,16 @@ public:
        bool empty() const noexcept;

        //- Return keyword
        const keyType& keyword() const noexcept
        {
            return first();
        }
        const keyType& keyword() const noexcept { return first(); }

        //- Return non-const access to keyword
        keyType& keyword() noexcept
        {
            return first();
        }
        keyType& keyword() noexcept { return first(); }

        //- Read-access to the dictionay
        const dictionary& dict() const noexcept
        {
            return second();
        }
        const dictionary& dict() const noexcept { return second(); }

        //- Write access to the dictionay
        dictionary& dict() noexcept
        {
            return second();
        }
        dictionary& dict() noexcept { return second(); }


    // IOstream Operators
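As the updated Description notes, keyword() and dict() mirror Foam::entry naming. A hedged sketch of iterating such an "actions ( ... );" list; the dictionary variable and the "type" keyword are invented for illustration and are not part of this commit:

// Hypothetical: entries read from an "actions ( { ... } ... );" list
List<namedDictionary> actions(someDict.lookup("actions"));

for (const namedDictionary& action : actions)
{
    if (action.empty()) continue;

    Info<< "action: " << action.keyword() << nl;

    // The enclosed dictionary now carries a name, giving context in errors
    const dictionary& dict = action.dict();
    const word actionType = dict.get<word>("type");   // illustrative keyword
}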
@@ -170,9 +170,9 @@ bool Foam::primitiveEntry::expandVariable

        // Parse string into a series of tokens

        tokenList toks(ITstream::parse(str));  // ASCII
        tokenList toks(ITstream::parse(str));  // ASCII

        ITstream::append(std::move(toks), true);  // Lazy resizing
        ITstream::add_tokens(std::move(toks));    // Add at tokenIndex
    }
    else if (eptr->isDict())
    {
@@ -185,10 +185,10 @@ bool Foam::primitiveEntry::expandVariable
            // Not found or empty: use ":-" alternative value
            // Found and not empty: use ":+" alternative value

            toks = ITstream::parse(altValue);  // ASCII
            toks = ITstream::parse(altValue);  // ASCII
        }

        ITstream::append(std::move(toks), true);  // Lazy resizing
        ITstream::add_tokens(std::move(toks));    // Add at tokenIndex
    }
    else
    {
@@ -201,11 +201,11 @@ bool Foam::primitiveEntry::expandVariable

            tokenList toks(ITstream::parse(altValue));  // ASCII

            ITstream::append(std::move(toks), true);  // Lazy resizing
            ITstream::add_tokens(std::move(toks));    // Add at tokenIndex
        }
        else
        {
            ITstream::append(eptr->stream(), true);  // Lazy resizing
            ITstream::add_tokens(eptr->stream());    // Add at tokenIndex
        }
    }
@@ -198,7 +198,7 @@ bool Foam::primitiveEntry::read(const dictionary& dict, Istream& is)

        if (acceptToken(tok, dict, is))
        {
            newElmt(tokenIndex()++) = std::move(tok);
            ITstream::add_tokens(std::move(tok));  // Add at tokenIndex
        }

        // With/without move: clear any old content and force to have a