Remove getNameValue from tokenizer (duplicate functionality of getStringValue)

pull/1279/head
Andrew Tribick 2021-12-14 19:39:57 +01:00 committed by ajtribick
parent 1d77f75963
commit 2a533b20d5
10 changed files with 37 additions and 44 deletions
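Every call site that previously read an identifier token with getNameValue() now calls getStringValue(), which returns the same underlying token text (both accessors returned textToken). A minimal usage sketch of the consolidated interface, assuming a generic caller rather than any particular file touched by this commit:

    Tokenizer tokenizer(&in);
    if (tokenizer.nextToken() == Tokenizer::TokenName)
    {
        // getStringValue() now covers both quoted strings and bare names,
        // so the separate getNameValue() accessor is redundant and removed.
        std::string objType = tokenizer.getStringValue();
    }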

View File

@@ -242,7 +242,7 @@ bool DSODatabase::load(istream& in, const fs::path& resourcePath)
             GetLogger()->error("Error parsing deep sky catalog file.\n");
             return false;
         }
-        objType = tokenizer.getNameValue();
+        objType = tokenizer.getStringValue();
         bool autoGenCatalogNumber = true;
         AstroCatalog::IndexNumber objCatalogNumber = AstroCatalog::InvalidIndex;

View File

@@ -76,7 +76,7 @@ Hash* Parser::readHash()
             delete hash;
             return nullptr;
         }
-        string name = tokenizer->getNameValue();
+        string name = tokenizer->getStringValue();
 #ifndef USE_POSTFIX_UNITS
         readUnits(name, hash);
@@ -126,7 +126,7 @@ bool Parser::readUnits(const string& propertyName, Hash* hash)
             return false;
         }
-        string unit = tokenizer->getNameValue();
+        string unit = tokenizer->getStringValue();
         Value* value = new Value(unit);
         if (astro::isLengthUnit(unit))
@@ -174,9 +174,9 @@ Value* Parser::readValue()
         return new Value(tokenizer->getStringValue());
     case Tokenizer::TokenName:
-        if (tokenizer->getNameValue() == "false")
+        if (tokenizer->getStringValue() == "false")
             return new Value(false);
-        else if (tokenizer->getNameValue() == "true")
+        else if (tokenizer->getStringValue() == "true")
             return new Value(true);
         else
         {

View File

@@ -1125,17 +1125,17 @@ bool LoadSolarSystemObjects(istream& in,
         DataDisposition disposition = DataDisposition::Add;
         if (tokenizer.getTokenType() == Tokenizer::TokenName)
         {
-            if (tokenizer.getNameValue() == "Add")
+            if (tokenizer.getStringValue() == "Add")
             {
                 disposition = DataDisposition::Add;
                 tokenizer.nextToken();
             }
-            else if (tokenizer.getNameValue() == "Replace")
+            else if (tokenizer.getStringValue() == "Replace")
             {
                 disposition = DataDisposition::Replace;
                 tokenizer.nextToken();
             }
-            else if (tokenizer.getNameValue() == "Modify")
+            else if (tokenizer.getStringValue() == "Modify")
             {
                 disposition = DataDisposition::Modify;
                 tokenizer.nextToken();
@@ -1146,7 +1146,7 @@ bool LoadSolarSystemObjects(istream& in,
         string itemType("Body");
         if (tokenizer.getTokenType() == Tokenizer::TokenName)
         {
-            itemType = tokenizer.getNameValue();
+            itemType = tokenizer.getStringValue();
             tokenizer.nextToken();
         }

View File

@@ -1204,17 +1204,17 @@ bool StarDatabase::load(istream& in, const fs::path& resourcePath)
         DataDisposition disposition = DataDisposition::Add;
         if (tokenizer.getTokenType() == Tokenizer::TokenName)
         {
-            if (tokenizer.getNameValue() == "Modify")
+            if (tokenizer.getStringValue() == "Modify")
             {
                 disposition = DataDisposition::Modify;
                 tokenizer.nextToken();
             }
-            else if (tokenizer.getNameValue() == "Replace")
+            else if (tokenizer.getStringValue() == "Replace")
             {
                 disposition = DataDisposition::Replace;
                 tokenizer.nextToken();
             }
-            else if (tokenizer.getNameValue() == "Add")
+            else if (tokenizer.getStringValue() == "Add")
             {
                 disposition = DataDisposition::Add;
                 tokenizer.nextToken();
@@ -1225,11 +1225,11 @@ bool StarDatabase::load(istream& in, const fs::path& resourcePath)
         // may be omitted. The default is Star.
         if (tokenizer.getTokenType() == Tokenizer::TokenName)
         {
-            if (tokenizer.getNameValue() == "Star")
+            if (tokenizer.getStringValue() == "Star")
             {
                 isStar = true;
             }
-            else if (tokenizer.getNameValue() == "Barycenter")
+            else if (tokenizer.getStringValue() == "Barycenter")
             {
                 isStar = false;
             }

View File

@@ -372,7 +372,7 @@ static VirtualTexture* LoadVirtualTexture(istream& in, const fs::path& path)
     if (tokenizer.nextToken() != Tokenizer::TokenName)
         return nullptr;
-    string virtTexString = tokenizer.getNameValue();
+    string virtTexString = tokenizer.getStringValue();
     if (virtTexString != "VirtualTexture")
         return nullptr;

View File

@@ -307,7 +307,7 @@ AsciiModelLoader::reportError(const std::string& msg)
 bool
 AsciiModelLoader::loadMaterial(Material& material)
 {
-    if (tok.nextToken() != Tokenizer::TokenName || tok.getNameValue() != MaterialToken)
+    if (tok.nextToken() != Tokenizer::TokenName || tok.getStringValue() != MaterialToken)
     {
         reportError("Material definition expected");
         return false;
@@ -319,7 +319,7 @@ AsciiModelLoader::loadMaterial(Material& material)
     material.specularPower = DefaultSpecularPower;
     material.opacity = DefaultOpacity;
-    while (tok.nextToken() == Tokenizer::TokenName && tok.getNameValue() != EndMaterialToken)
+    while (tok.nextToken() == Tokenizer::TokenName && tok.getStringValue() != EndMaterialToken)
     {
         std::string property = tok.getStringValue();
         TextureSemantic texType = parseTextureSemantic(property);
@@ -424,7 +424,7 @@ AsciiModelLoader::loadMaterial(Material& material)
 VertexDescription
 AsciiModelLoader::loadVertexDescription()
 {
-    if (tok.nextToken() != Tokenizer::TokenName || tok.getNameValue() != VertexDescToken)
+    if (tok.nextToken() != Tokenizer::TokenName || tok.getStringValue() != VertexDescToken)
     {
         reportError("Vertex description expected");
         return {};
@@ -436,7 +436,7 @@ AsciiModelLoader::loadVertexDescription()
     std::vector<VertexAttribute> attributes;
     attributes.reserve(maxAttributes);
-    while (tok.nextToken() == Tokenizer::TokenName && tok.getNameValue() != EndVertexDescToken)
+    while (tok.nextToken() == Tokenizer::TokenName && tok.getStringValue() != EndVertexDescToken)
     {
         std::string semanticName;
         std::string formatName;
@@ -504,7 +504,7 @@ std::vector<VWord>
 AsciiModelLoader::loadVertices(const VertexDescription& vertexDesc,
                                unsigned int& vertexCount)
 {
-    if (tok.nextToken() != Tokenizer::TokenName && tok.getNameValue() != VerticesToken)
+    if (tok.nextToken() != Tokenizer::TokenName && tok.getStringValue() != VerticesToken)
     {
         reportError("Vertex data expected");
         return {};
@@ -597,7 +597,7 @@ AsciiModelLoader::loadVertices(const VertexDescription& vertexDesc,
 bool
 AsciiModelLoader::loadMesh(Mesh& mesh)
 {
-    if (tok.nextToken() != Tokenizer::TokenName && tok.getNameValue() != MeshToken)
+    if (tok.nextToken() != Tokenizer::TokenName && tok.getStringValue() != MeshToken)
     {
         reportError("Mesh definition expected");
         return false;
@@ -617,7 +617,7 @@ AsciiModelLoader::loadMesh(Mesh& mesh)
     mesh.setVertexDescription(std::move(vertexDesc));
     mesh.setVertices(vertexCount, std::move(vertexData));
-    while (tok.nextToken() == Tokenizer::TokenName && tok.getNameValue() != EndMeshToken)
+    while (tok.nextToken() == Tokenizer::TokenName && tok.getStringValue() != EndMeshToken)
     {
         PrimitiveGroupType type = parsePrimitiveGroupType(tok.getStringValue());
         if (type == PrimitiveGroupType::InvalidPrimitiveGroupType)

View File

@@ -870,7 +870,7 @@ uint64_t parseRenderFlags(const string &s, const FlagMap64& RenderFlagMap)
     {
         if (ttype == Tokenizer::TokenName)
         {
-            string name = tokenizer.getNameValue();
+            string name = tokenizer.getStringValue();
             if (RenderFlagMap.count(name) == 0)
                 GetLogger()->warn("Unknown render flag: {}\n", name);
@@ -899,7 +899,7 @@ int parseLabelFlags(const string &s, const FlagMap &LabelFlagMap)
     {
         if (ttype == Tokenizer::TokenName)
         {
-            string name = tokenizer.getNameValue();
+            string name = tokenizer.getStringValue();
             if (LabelFlagMap.count(name) == 0)
                 GetLogger()->warn("Unknown label flag: {}\n", name);
@@ -928,7 +928,7 @@ int parseOrbitFlags(const string &s, const FlagMap &BodyTypeMap)
     {
         if (ttype == Tokenizer::TokenName)
         {
-            string name = tokenizer.getNameValue();
+            string name = tokenizer.getStringValue();
             name[0] = toupper(name[0]);
             if (BodyTypeMap.count(name) == 0)
@@ -958,7 +958,7 @@ int parseConstellations(CommandConstellations* cmd, const string &s, int act)
     {
         if (ttype == Tokenizer::TokenName)
         {
-            string name = tokenizer.getNameValue();
+            string name = tokenizer.getStringValue();
             if (compareIgnoringCase(name, "all") == 0 && act==1)
                 cmd->flags.all = true;
             else if (compareIgnoringCase(name, "all") == 0 && act==0)
@@ -998,7 +998,7 @@ int parseConstellationColor(CommandConstellationColor* cmd, const string &s, Eig
     {
         if (ttype == Tokenizer::TokenName)
         {
-            string name = tokenizer.getNameValue();
+            string name = tokenizer.getStringValue();
             if (compareIgnoringCase(name, "all") == 0 && act==1)
                 cmd->flags.all = true;
             else if (compareIgnoringCase(name, "all") == 0 && act==0)

View File

@@ -565,12 +565,6 @@ std::int32_t Tokenizer::getIntegerValue() const
 }
-std::string Tokenizer::getNameValue() const
-{
-    return textToken;
-}
 std::string Tokenizer::getStringValue() const
 {
     return textToken;

View File

@@ -45,7 +45,6 @@ public:
     double getNumberValue() const;
     bool isInteger() const;
     std::int32_t getIntegerValue() const;
-    std::string getNameValue() const;
     std::string getStringValue() const;
     int getLineNumber() const;

View File

@@ -17,19 +17,19 @@ TEST_CASE("Tokenizer parses names", "[Tokenizer]")
         Tokenizer tok(&input);
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Normal");
+        REQUIRE(tok.getStringValue() == "Normal");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Number2");
+        REQUIRE(tok.getStringValue() == "Number2");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Number3Number");
+        REQUIRE(tok.getStringValue() == "Number3Number");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "snake_case");
+        REQUIRE(tok.getStringValue() == "snake_case");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "_prefixed");
+        REQUIRE(tok.getStringValue() == "_prefixed");
         REQUIRE(tok.nextToken() == Tokenizer::TokenEnd);
     }
@@ -40,12 +40,12 @@ TEST_CASE("Tokenizer parses names", "[Tokenizer]")
         Tokenizer tok(&input);
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Quantity");
+        REQUIRE(tok.getStringValue() == "Quantity");
         REQUIRE(tok.nextToken() == Tokenizer::TokenBeginUnits);
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "unit");
+        REQUIRE(tok.getStringValue() == "unit");
         REQUIRE(tok.nextToken() == Tokenizer::TokenEndUnits);
         REQUIRE(tok.nextToken() == Tokenizer::TokenEnd);
@@ -332,13 +332,13 @@ TEST_CASE("Tokenizer skips comments", "[Tokenizer]")
         Tokenizer tok(&input);
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Token1");
+        REQUIRE(tok.getStringValue() == "Token1");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Token2");
+        REQUIRE(tok.getStringValue() == "Token2");
         REQUIRE(tok.nextToken() == Tokenizer::TokenName);
-        REQUIRE(tok.getNameValue() == "Token3");
+        REQUIRE(tok.getStringValue() == "Token3");
         REQUIRE(tok.nextToken() == Tokenizer::TokenEnd);
     }