Refactored tokenizer to lazily evaluate file locations.
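Constructing a Location now only records the stream and the position; section, row, and column are computed on first access and cached. Roughly the idea, as a minimal sketch with illustrative names (not the code from this commit — the actual Location in the diff below also resolves the section name via the stream's delimiters):

#include <cstddef>
#include <string>

// Sketch of the lazy pattern: nothing is computed at construction time,
// row/column are derived by a single scan on first access and then cached.
class LazyLocation
{
	public:
		LazyLocation(const std::string &stream, std::size_t position)
		:	m_stream{stream},
			m_position{position}
		{
		}

		std::size_t row() const
		{
			initializeLazily();
			return m_row;
		}

		std::size_t column() const
		{
			initializeLazily();
			return m_column;
		}

	private:
		// Scan the stream once, only when a coordinate is actually requested
		void initializeLazily() const
		{
			if (m_isInitialized)
				return;

			for (std::size_t i = 0; i < m_position && i < m_stream.size(); i++)
				if (m_stream[i] == '\n')
				{
					m_row++;
					m_column = 1;
				}
				else
					m_column++;

			m_isInitialized = true;
		}

		const std::string &m_stream;
		const std::size_t m_position;

		mutable bool m_isInitialized{false};
		mutable std::size_t m_row{1};
		mutable std::size_t m_column{1};
};
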
@@ -23,7 +23,7 @@ struct Context
	}

	// TODO: replace std::string with char *
	using WarningCallback = std::function<void (tokenize::Location, const std::string &)>;
	using WarningCallback = std::function<void (tokenize::Location &&, const std::string &)>;

	Context() = default;
	~Context() = default;

@@ -2,9 +2,10 @@
#define __PDDL_PARSE__EXCEPTION_H

#include <exception>
#include <experimental/optional>
#include <string>

#include <tokenize/Stream.h>
#include <tokenize/Location.h>

namespace pddl
{
@@ -18,48 +19,45 @@ namespace pddl
class Exception: public std::exception
{
	public:
		explicit Exception()
		Exception()
		:	Exception("unspecified parser error")
		{
		}

		explicit Exception(const char *message)
		Exception(const char *message)
		:	Exception(static_cast<std::string>(message))
		{
		}

		explicit Exception(const std::string &message)
		Exception(const std::string &message)
		:	m_message{message}
		{
		}

		explicit Exception(const tokenize::Location &location)
		:	Exception(location, "unspecified parser error")
		Exception(tokenize::Location &&location)
		:	Exception(std::forward<tokenize::Location>(location), "unspecified parser error")
		{
		}

		explicit Exception(const tokenize::Location &location, const char *message)
		:	Exception(location, static_cast<std::string>(message))
		Exception(tokenize::Location &&location, const char *message)
		:	Exception(std::forward<tokenize::Location>(location), static_cast<std::string>(message))
		{
		}

		explicit Exception(const tokenize::Location &location, const std::string &message)
		:	m_location{location},
			m_message{message},
			// TODO: refactor
			m_plainMessage{std::string(m_location.sectionStart) + ":" + std::to_string(m_location.rowStart)
				+ ":" + std::to_string(m_location.columnStart) + " " + m_message}
		Exception(tokenize::Location &&location, const std::string &message)
		:	m_location{std::move(location)},
			m_message{message}
		{
		}

		~Exception() noexcept = default;

		const char *what() const throw()
		const char *what() const noexcept
		{
			return m_plainMessage.c_str();
			return m_message.c_str();
		}

		const tokenize::Location &location() const
		const std::experimental::optional<tokenize::Location> &location() const
		{
			return m_location;
		}
@@ -70,9 +68,8 @@ class Exception: public std::exception
		}

	private:
		tokenize::Location m_location;
		std::experimental::optional<tokenize::Location> m_location;
		std::string m_message;
		std::string m_plainMessage;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -34,12 +34,12 @@ class ActionParser
		Context &m_context;
		ast::Domain &m_domain;

		tokenize::Stream::Position m_parametersPosition;
		tokenize::Stream::Position m_preconditionPosition;
		tokenize::Stream::Position m_effectPosition;
		tokenize::StreamPosition m_parametersPosition;
		tokenize::StreamPosition m_preconditionPosition;
		tokenize::StreamPosition m_effectPosition;

		// For compatibility with old PDDL versions
		tokenize::Stream::Position m_varsPosition;
		tokenize::StreamPosition m_varsPosition;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -27,8 +27,8 @@ class DescriptionParser
		void findSections();

		Context &m_context;
		tokenize::Stream::Position m_domainPosition;
		tokenize::Stream::Position m_problemPosition;
		tokenize::StreamPosition m_domainPosition;
		tokenize::StreamPosition m_problemPosition;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -35,11 +35,11 @@ class DomainParser

		Context &m_context;

		tokenize::Stream::Position m_requirementsPosition;
		tokenize::Stream::Position m_typesPosition;
		tokenize::Stream::Position m_constantsPosition;
		tokenize::Stream::Position m_predicatesPosition;
		std::vector<tokenize::Stream::Position> m_actionPositions;
		tokenize::StreamPosition m_requirementsPosition;
		tokenize::StreamPosition m_typesPosition;
		tokenize::StreamPosition m_constantsPosition;
		tokenize::StreamPosition m_predicatesPosition;
		std::vector<tokenize::StreamPosition> m_actionPositions;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -64,12 +64,12 @@ std::experimental::optional<std::unique_ptr<Derived>> parseBinary(Context &conte
	auto argumentLeft = parseArgumentLeft(context, astContext, variableStack);

	if (!argumentLeft)
		throw ParserException(tokenizer.location(), "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");
		throw ParserException(tokenizer, "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");

	auto argumentRight = parseArgumentRight(context, astContext, variableStack);

	if (!argumentRight)
		throw ParserException(tokenizer.location(), "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");
		throw ParserException(tokenizer, "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");

	tokenizer.expect<std::string>(")");

@@ -103,7 +103,7 @@ std::experimental::optional<std::unique_ptr<Derived>> parseNAry(Context &context
		auto argument = parseArgument(context, astContext, variableStack);

		if (!argument)
			throw ParserException(tokenizer.location(), "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");
			throw ParserException(tokenizer, "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");

		arguments.emplace_back(std::move(argument.value()));

@@ -111,7 +111,7 @@ std::experimental::optional<std::unique_ptr<Derived>> parseNAry(Context &context
	}

	if (arguments.empty())
		context.warningCallback(tokenizer.location(), "“" + std::string(Derived::Identifier) + "” expressions should not be empty");
		context.warningCallback(tokenizer, "“" + std::string(Derived::Identifier) + "” expressions should not be empty");

	tokenizer.expect<std::string>(")");

@@ -147,7 +147,7 @@ std::experimental::optional<std::unique_ptr<Derived>> parseQuantified(Context &c
	auto argument = parseArgument(context, astContext, variableStack);

	if (!argument)
		throw ParserException(tokenizer.location(), "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");
		throw ParserException(tokenizer, "could not parse argument of “" + std::string(Derived::Identifier) + "” expression");

	// Clean up variable stack
	variableStack.pop();
@@ -221,7 +221,7 @@ std::experimental::optional<ast::NotPointer<Argument>> parseNot(Context &context
	auto argument = parseArgument(context, astContext, variableStack);

	if (!argument)
		throw ParserException(tokenizer.location(), "could not parse argument of “not” expression");
		throw ParserException(tokenizer, "could not parse argument of “not” expression");

	tokenizer.expect<std::string>(")");

@@ -36,11 +36,11 @@ class ProblemParser
		Context &m_context;
		ast::Domain &m_domain;

		tokenize::Stream::Position m_domainPosition;
		tokenize::Stream::Position m_requirementsPosition;
		tokenize::Stream::Position m_objectsPosition;
		tokenize::Stream::Position m_initialStatePosition;
		tokenize::Stream::Position m_goalPosition;
		tokenize::StreamPosition m_domainPosition;
		tokenize::StreamPosition m_requirementsPosition;
		tokenize::StreamPosition m_objectsPosition;
		tokenize::StreamPosition m_initialStatePosition;
		tokenize::StreamPosition m_goalPosition;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -61,9 +61,9 @@ void checkRequirement(ast::Domain &domain, ast::Requirement requirement, Context
		return;

	if (context.mode == Mode::Compatibility)
		context.warningCallback(context.tokenizer.location(), "requirement “" + std::string(toString(requirement)) + "” used but never declared, silently adding requirement");
		context.warningCallback(context.tokenizer, "requirement “" + std::string(toString(requirement)) + "” used but never declared, silently adding requirement");
	else
		throw ParserException(context.tokenizer.location(), "requirement “" + std::string(toString(requirement)) + "” used but never declared");
		throw ParserException(context.tokenizer, "requirement “" + std::string(toString(requirement)) + "” used but never declared");

	domain.requirements.push_back(requirement);
}
@@ -76,9 +76,9 @@ void checkRequirement(ast::Problem &problem, ast::Requirement requirement, Conte
		return;

	if (context.mode == Mode::Compatibility)
		context.warningCallback(context.tokenizer.location(), "requirement “" + std::string(toString(requirement)) + "” used but never declared, silently adding requirement");
		context.warningCallback(context.tokenizer, "requirement “" + std::string(toString(requirement)) + "” used but never declared, silently adding requirement");
	else
		throw ParserException(context.tokenizer.location(), "requirement “" + std::string(toString(requirement)) + "” used but never declared");
		throw ParserException(context.tokenizer, "requirement “" + std::string(toString(requirement)) + "” used but never declared");

	problem.requirements.push_back(requirement);
}

@@ -23,10 +23,10 @@ namespace detail
ActionParser::ActionParser(Context &context, ast::Domain &domain)
:	m_context{context},
	m_domain{domain},
	m_parametersPosition{tokenize::Stream::InvalidPosition},
	m_preconditionPosition{tokenize::Stream::InvalidPosition},
	m_effectPosition{tokenize::Stream::InvalidPosition},
	m_varsPosition{tokenize::Stream::InvalidPosition}
	m_parametersPosition{tokenize::InvalidStreamPosition},
	m_preconditionPosition{tokenize::InvalidStreamPosition},
	m_effectPosition{tokenize::InvalidStreamPosition},
	m_varsPosition{tokenize::InvalidStreamPosition}
{
}

@@ -40,26 +40,26 @@ ast::ActionPointer ActionParser::parse()

	auto &tokenizer = m_context.tokenizer;

	if (m_parametersPosition != tokenize::Stream::InvalidPosition)
	if (m_parametersPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_parametersPosition);
		parseParameterSection(*action);
	}

	// For compatibility with old PDDL versions, vars sections are parsed in addition to parameters
	if (m_varsPosition != tokenize::Stream::InvalidPosition)
	if (m_varsPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_varsPosition);
		parseVarsSection(*action);
	}

	if (m_preconditionPosition != tokenize::Stream::InvalidPosition)
	if (m_preconditionPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_preconditionPosition);
		parsePreconditionSection(*action);
	}

	if (m_effectPosition != tokenize::Stream::InvalidPosition)
	if (m_effectPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_effectPosition);
		parseEffectSection(*action);
@@ -84,10 +84,10 @@ void ActionParser::findSections(ast::Action &action)
	const auto setSectionPosition =
		[&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
		{
			if (unique && sectionPosition != tokenize::Stream::InvalidPosition)
			if (unique && sectionPosition != tokenize::InvalidStreamPosition)
			{
				tokenizer.seek(value);
				throw ParserException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
				throw ParserException(tokenizer, "only one “:" + sectionName + "” section allowed");
			}

			sectionPosition = value;
@@ -114,7 +114,7 @@ void ActionParser::findSections(ast::Action &action)
			const auto sectionIdentifier = tokenizer.getIdentifier();

			tokenizer.seek(position);
			throw ParserException(tokenizer.location(), "unknown action section “" + sectionIdentifier + "”");
			throw ParserException(tokenizer, "unknown action section “" + sectionIdentifier + "”");
		}

		tokenizer.expect<std::string>("(");
@@ -181,7 +181,7 @@ void ActionParser::parseVarsSection(ast::Action &action)
	tokenizer.expect<std::string>(":vars");
	tokenizer.expect<std::string>("(");

	m_context.warningCallback(tokenizer.location(), "“vars” section is not part of the PDDL 3.1 specification, treating it like additional “parameters” section");
	m_context.warningCallback(tokenizer, "“vars” section is not part of the PDDL 3.1 specification, treating it like additional “parameters” section");

	parseAndAddVariableDeclarations(m_context, m_domain, action.parameters);

@@ -73,7 +73,7 @@ ast::ConstantPointer parseConstant(Context &context, ASTContext &astContext)
	auto constant = findConstant(constantName, astContext);

	if (!constant)
		throw ParserException(tokenizer.location(), "undeclared constant “" + constantName + "”");
		throw ParserException(tokenizer, "undeclared constant “" + constantName + "”");

	return std::move(constant.value());
}

@@ -19,8 +19,8 @@ namespace detail

DescriptionParser::DescriptionParser(Context &context)
:	m_context{context},
	m_domainPosition{tokenize::Stream::InvalidPosition},
	m_problemPosition{tokenize::Stream::InvalidPosition}
	m_domainPosition{tokenize::InvalidStreamPosition},
	m_problemPosition{tokenize::InvalidStreamPosition}
{
}

@@ -33,7 +33,7 @@ ast::Description DescriptionParser::parse()

	findSections();

	if (m_domainPosition == tokenize::Stream::InvalidPosition)
	if (m_domainPosition == tokenize::InvalidStreamPosition)
		throw ParserException("no PDDL domain specified");

	tokenizer.seek(m_domainPosition);
@@ -41,7 +41,7 @@ ast::Description DescriptionParser::parse()
	auto domain = DomainParser(m_context).parse();

	// If no problem is given, return just the domain
	if (m_problemPosition == tokenize::Stream::InvalidPosition)
	if (m_problemPosition == tokenize::InvalidStreamPosition)
		return {std::move(domain), std::experimental::nullopt};

	tokenizer.seek(m_problemPosition);
@@ -73,7 +73,7 @@ void DescriptionParser::findSections()

		if (m_context.mode == Mode::Compatibility && tokenizer.testAndReturn<std::string>("in-package"))
		{
			m_context.warningCallback(tokenizer.location(), "“in-package” section is not part of the PDDL 3.1 specification, ignoring section");
			m_context.warningCallback(tokenizer, "“in-package” section is not part of the PDDL 3.1 specification, ignoring section");

			skipSection(tokenizer);
			tokenizer.skipWhiteSpace();
@@ -86,8 +86,8 @@ void DescriptionParser::findSections()

		if (tokenizer.testAndSkip<std::string>("domain"))
		{
			if (m_domainPosition != tokenize::Stream::InvalidPosition)
				throw ParserException(tokenizer.location(), "PDDL description may not contain two domains");
			if (m_domainPosition != tokenize::InvalidStreamPosition)
				throw ParserException(tokenizer, "PDDL description may not contain two domains");

			m_domainPosition = position;
			skipSection(tokenizer);
@@ -95,7 +95,7 @@ void DescriptionParser::findSections()
		}
		else if (m_context.tokenizer.testAndSkip<std::string>("problem"))
		{
			if (m_problemPosition != tokenize::Stream::InvalidPosition)
			if (m_problemPosition != tokenize::InvalidStreamPosition)
				throw ParserException("PDDL description may not contain two problems currently");

			m_problemPosition = position;
@@ -105,7 +105,7 @@ void DescriptionParser::findSections()
		else
		{
			const auto sectionIdentifier = tokenizer.get<std::string>();
			throw ParserException(tokenizer.location(), "unknown PDDL section “" + sectionIdentifier + "”");
			throw ParserException(tokenizer, "unknown PDDL section “" + sectionIdentifier + "”");
		}

		tokenizer.skipWhiteSpace();

@@ -22,10 +22,10 @@ namespace detail

DomainParser::DomainParser(Context &context)
:	m_context{context},
	m_requirementsPosition{tokenize::Stream::InvalidPosition},
	m_typesPosition{tokenize::Stream::InvalidPosition},
	m_constantsPosition{tokenize::Stream::InvalidPosition},
	m_predicatesPosition{tokenize::Stream::InvalidPosition}
	m_requirementsPosition{tokenize::InvalidStreamPosition},
	m_typesPosition{tokenize::InvalidStreamPosition},
	m_constantsPosition{tokenize::InvalidStreamPosition},
	m_predicatesPosition{tokenize::InvalidStreamPosition}
{
}

@@ -39,32 +39,32 @@ ast::DomainPointer DomainParser::parse()

	auto &tokenizer = m_context.tokenizer;

	if (m_requirementsPosition != tokenize::Stream::InvalidPosition)
	if (m_requirementsPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_requirementsPosition);
		parseRequirementSection(*domain);
	}

	if (m_typesPosition != tokenize::Stream::InvalidPosition)
	if (m_typesPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_typesPosition);
		parseTypeSection(*domain);
	}

	if (m_constantsPosition != tokenize::Stream::InvalidPosition)
	if (m_constantsPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_constantsPosition);
		parseConstantSection(*domain);
	}

	if (m_predicatesPosition != tokenize::Stream::InvalidPosition)
	if (m_predicatesPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_predicatesPosition);
		parsePredicateSection(*domain);
	}

	for (size_t i = 0; i < m_actionPositions.size(); i++)
		if (m_actionPositions[i] != tokenize::Stream::InvalidPosition)
		if (m_actionPositions[i] != tokenize::InvalidStreamPosition)
		{
			tokenizer.seek(m_actionPositions[i]);
			parseActionSection(*domain);
@@ -93,10 +93,10 @@ void DomainParser::findSections(ast::Domain &domain)
	const auto setSectionPosition =
		[&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
		{
			if (unique && sectionPosition != tokenize::Stream::InvalidPosition)
			if (unique && sectionPosition != tokenize::InvalidStreamPosition)
			{
				tokenizer.seek(value);
				throw ParserException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
				throw ParserException(tokenizer, "only one “:" + sectionName + "” section allowed");
			}

			sectionPosition = value;
@@ -125,7 +125,7 @@ void DomainParser::findSections(ast::Domain &domain)
			setSectionPosition("predicates", m_predicatesPosition, position, true);
		else if (tokenizer.testIdentifierAndSkip("action"))
		{
			m_actionPositions.emplace_back(tokenize::Stream::InvalidPosition);
			m_actionPositions.emplace_back(tokenize::InvalidStreamPosition);
			setSectionPosition("action", m_actionPositions.back(), position);
		}
		else if (tokenizer.testIdentifierAndSkip("functions")
@@ -137,7 +137,7 @@ void DomainParser::findSections(ast::Domain &domain)

			const auto sectionIdentifier = tokenizer.getIdentifier();

			m_context.warningCallback(tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported, ignoring section");
			m_context.warningCallback(tokenizer, "section type “" + sectionIdentifier + "” currently unsupported, ignoring section");

			tokenizer.seek(sectionIdentifierPosition);
		}
@@ -146,7 +146,7 @@ void DomainParser::findSections(ast::Domain &domain)
			const auto sectionIdentifier = tokenizer.getIdentifier();

			tokenizer.seek(position);
			throw ParserException(tokenizer.location(), "unknown domain section “" + sectionIdentifier + "”");
			throw ParserException(tokenizer, "unknown domain section “" + sectionIdentifier + "”");
		}

		// Skip section for now and parse it later
@@ -246,7 +246,7 @@ void DomainParser::parseTypeSection(ast::Domain &domain)
	while (tokenizer.currentCharacter() != ')')
	{
		if (tokenizer.currentCharacter() == '(')
			throw ParserException(tokenizer.location(), "only primitive types are allowed in type section");
			throw ParserException(tokenizer, "only primitive types are allowed in type section");

		parseAndAddPrimitiveTypeDeclarations(m_context, domain);

@@ -83,7 +83,7 @@ std::experimental::optional<ast::Effect> parseEffectBody(Context &context, ASTCo
	const auto expressionIdentifier = tokenizer.getIdentifier();

	tokenizer.seek(position);
	throw ParserException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in effect body");
	throw ParserException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in effect body");
}

////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -144,7 +144,7 @@ std::experimental::optional<ast::ConditionalEffect> parseConditionalEffectBody(C
	const auto expressionIdentifier = tokenizer.getIdentifier();

	tokenizer.seek(position);
	throw ParserException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in conditional effect body");
	throw ParserException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in conditional effect body");
}

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -28,7 +28,7 @@ ast::InitialState parseInitialState(Context &context, ASTContext &astContext, Va
		auto fact = parseFact(context, astContext, variableStack);

		if (!fact)
			throw ParserException(tokenizer.location(), "invalid initial state fact");
			throw ParserException(tokenizer, "invalid initial state fact");

		initialState.facts.emplace_back(std::move(fact.value()));

@@ -99,7 +99,7 @@ std::experimental::optional<ast::Precondition> parsePreconditionBody(Context &co
	const auto expressionIdentifier = tokenizer.getIdentifier();

	tokenizer.seek(position);
	throw ParserException(tokenizer.location(), "expression type “" + expressionIdentifier + "” unknown or not allowed in precondition body");
	throw ParserException(tokenizer, "expression type “" + expressionIdentifier + "” unknown or not allowed in precondition body");
}

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -78,7 +78,7 @@ std::experimental::optional<ast::PredicatePointer> parsePredicate(Context &conte
	{
		// TODO: enumerate candidates and why they are incompatible
		tokenizer.seek(previousPosition);
		throw ParserException(tokenizer.location(), "no matching declaration found for predicate “" + name + "”");
		throw ParserException(tokenizer, "no matching declaration found for predicate “" + name + "”");
	}

	auto *declaration = matchingPredicateDeclaration->get();

@@ -22,7 +22,7 @@ ast::PrimitiveTypePointer parsePrimitiveType(Context &context, ast::Domain &doma
	auto typeName = tokenizer.getIdentifier();

	if (typeName.empty())
		throw ParserException(tokenizer.location(), "could not parse primitive type, expected identifier");
		throw ParserException(tokenizer, "could not parse primitive type, expected identifier");

	auto matchingType = std::find_if(types.begin(), types.end(),
		[&](auto &primitiveTypeDeclaration)
@@ -34,9 +34,9 @@ ast::PrimitiveTypePointer parsePrimitiveType(Context &context, ast::Domain &doma
	if (matchingType == types.end())
	{
		if (context.mode != Mode::Compatibility)
			throw ParserException(tokenizer.location(), "primitive type “" + typeName + "” used without or before declaration");
			throw ParserException(tokenizer, "primitive type “" + typeName + "” used without or before declaration");

		context.warningCallback(tokenizer.location(), "primitive type “" + typeName + "” used without or before declaration, silently adding declaration");
		context.warningCallback(tokenizer, "primitive type “" + typeName + "” used without or before declaration, silently adding declaration");

		types.emplace_back(std::make_unique<ast::PrimitiveTypeDeclaration>(std::move(typeName)));

@@ -22,11 +22,11 @@ namespace detail
ProblemParser::ProblemParser(Context &context, ast::Domain &domain)
:	m_context{context},
	m_domain{domain},
	m_domainPosition{tokenize::Stream::InvalidPosition},
	m_requirementsPosition{tokenize::Stream::InvalidPosition},
	m_objectsPosition{tokenize::Stream::InvalidPosition},
	m_initialStatePosition{tokenize::Stream::InvalidPosition},
	m_goalPosition{tokenize::Stream::InvalidPosition}
	m_domainPosition{tokenize::InvalidStreamPosition},
	m_requirementsPosition{tokenize::InvalidStreamPosition},
	m_objectsPosition{tokenize::InvalidStreamPosition},
	m_initialStatePosition{tokenize::InvalidStreamPosition},
	m_goalPosition{tokenize::InvalidStreamPosition}
{
}

@@ -40,32 +40,32 @@ ast::ProblemPointer ProblemParser::parse()

	auto &tokenizer = m_context.tokenizer;

	if (m_domainPosition == tokenize::Stream::InvalidPosition)
		throw ParserException(tokenizer.location(), "problem description does not specify a corresponding domain");
	if (m_domainPosition == tokenize::InvalidStreamPosition)
		throw ParserException(tokenizer, "problem description does not specify a corresponding domain");

	tokenizer.seek(m_domainPosition);
	parseDomainSection(*problem);

	if (m_requirementsPosition != tokenize::Stream::InvalidPosition)
	if (m_requirementsPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_requirementsPosition);
		parseRequirementSection(*problem);
	}

	if (m_objectsPosition != tokenize::Stream::InvalidPosition)
	if (m_objectsPosition != tokenize::InvalidStreamPosition)
	{
		tokenizer.seek(m_objectsPosition);
		parseObjectSection(*problem);
	}

	if (m_initialStatePosition == tokenize::Stream::InvalidPosition)
		throw ParserException(tokenizer.location(), "problem description does not specify an initial state");
	if (m_initialStatePosition == tokenize::InvalidStreamPosition)
		throw ParserException(tokenizer, "problem description does not specify an initial state");

	tokenizer.seek(m_initialStatePosition);
	parseInitialStateSection(*problem);

	if (m_goalPosition == tokenize::Stream::InvalidPosition)
		throw ParserException(tokenizer.location(), "problem description does not specify a goal");
	if (m_goalPosition == tokenize::InvalidStreamPosition)
		throw ParserException(tokenizer, "problem description does not specify a goal");

	tokenizer.seek(m_goalPosition);
	parseGoalSection(*problem);
@@ -91,10 +91,10 @@ void ProblemParser::findSections(ast::Problem &problem)
	const auto setSectionPosition =
		[&](const std::string &sectionName, auto &sectionPosition, const auto value, bool unique = false)
		{
			if (unique && sectionPosition != tokenize::Stream::InvalidPosition)
			if (unique && sectionPosition != tokenize::InvalidStreamPosition)
			{
				tokenizer.seek(value);
				throw ParserException(tokenizer.location(), "only one “:" + sectionName + "” section allowed");
				throw ParserException(tokenizer, "only one “:" + sectionName + "” section allowed");
			}

			sectionPosition = value;
@@ -129,7 +129,7 @@ void ProblemParser::findSections(ast::Problem &problem)

			const auto sectionIdentifier = tokenizer.getIdentifier();

			m_context.warningCallback(tokenizer.location(), "section type “" + sectionIdentifier + "” currently unsupported, ignoring section");
			m_context.warningCallback(tokenizer, "section type “" + sectionIdentifier + "” currently unsupported, ignoring section");

			tokenizer.seek(sectionIdentifierPosition);
		}
@@ -138,7 +138,7 @@ void ProblemParser::findSections(ast::Problem &problem)
			const auto sectionIdentifier = tokenizer.getIdentifier();

			tokenizer.seek(position);
			throw ParserException(tokenizer.location(), "unknown problem section “" + sectionIdentifier + "”");
			throw ParserException(tokenizer, "unknown problem section “" + sectionIdentifier + "”");
		}

		// Skip section for now and parse it later
@@ -165,7 +165,7 @@ void ProblemParser::parseDomainSection(ast::Problem &problem)
	const auto domainName = tokenizer.getIdentifier();

	if (problem.domain->name != domainName)
		throw ParserException(tokenizer.location(), "domains do not match (“" + problem.domain->name + "” and “" + domainName + "”)");
		throw ParserException(tokenizer, "domains do not match (“" + problem.domain->name + "” and “" + domainName + "”)");

	tokenizer.expect<std::string>(")");
}

@@ -67,7 +67,7 @@ std::experimental::optional<ast::Requirement> parseRequirement(Context &context)
		return matchingRequirement->second;

	if (context.mode == Mode::Compatibility && (requirementName == "goal-utilities" || requirementName == "domain-axioms"))
		context.warningCallback(tokenizer.location(), "“" + requirementName + "” requirement is not part of the PDDL 3.1 specification, ignoring requirement");
		context.warningCallback(tokenizer, "“" + requirementName + "” requirement is not part of the PDDL 3.1 specification, ignoring requirement");

	return std::experimental::nullopt;
}

@@ -37,7 +37,7 @@ ast::Type parseType(Context &context, ast::Domain &domain)
		auto eitherType = parseEither<ast::PrimitiveTypePointer>(context, astContext, variableStack, parsePrimitiveTypeWrapper);

		if (!eitherType)
			throw ParserException(tokenizer.location(), "expected primitive type or “either” expression");
			throw ParserException(tokenizer, "expected primitive type or “either” expression");

		return std::move(eitherType.value());
	}

@@ -22,7 +22,7 @@ ast::UnsupportedPointer parseUnsupported(Context &context)

	auto expressionType = tokenizer.getIdentifier();

	context.warningCallback(tokenizer.location(), "expression type “" + expressionType + "” currently unsupported in this context, substituting it with placeholder");
	context.warningCallback(tokenizer, "expression type “" + expressionType + "” currently unsupported in this context, substituting it with placeholder");

	skipSection(tokenizer);

@@ -64,7 +64,7 @@ ast::VariablePointer parseVariable(Context &context, VariableStack &variableStac
	auto variableDeclaration = variableStack.findVariableDeclaration(variableName);

	if (!variableDeclaration)
		throw ParserException(tokenizer.location(), "undeclared variable “" + variableName + "”");
		throw ParserException(tokenizer, "undeclared variable “" + variableName + "”");

	return std::make_unique<ast::Variable>(variableDeclaration.value());
}

@@ -1,7 +1,7 @@
#ifndef __TOKENIZE__LOCATION_H
#define __TOKENIZE__LOCATION_H

#include <cstdlib>
#include <tokenize/StreamPosition.h>

namespace tokenize
{
@@ -12,16 +12,41 @@ namespace tokenize
//
////////////////////////////////////////////////////////////////////////////////////////////////////

struct Location
class Stream;

////////////////////////////////////////////////////////////////////////////////////////////////////

class Location
{
	const char *sectionStart = nullptr;
	const char *sectionEnd = nullptr;
	public:
		Location(Stream &stream);
		Location(Stream &stream, StreamPosition position);

	std::size_t rowStart = -1;
	std::size_t rowEnd = -1;
		const char *sectionStart() const;
		const char *sectionEnd() const;

	std::size_t columnStart = -1;
	std::size_t columnEnd = -1;
		StreamPosition rowStart() const;
		StreamPosition rowEnd() const;

		StreamPosition columnStart() const;
		StreamPosition columnEnd() const;

	private:
		void initializeLazily() const;

		Stream &m_stream;
		const StreamPosition m_position;

		mutable bool m_isInitialized{false};

		mutable const char *m_sectionStart{nullptr};
		mutable const char *m_sectionEnd{nullptr};

		mutable StreamPosition m_rowStart{InvalidStreamPosition};
		mutable StreamPosition m_rowEnd{InvalidStreamPosition};

		mutable StreamPosition m_columnStart{InvalidStreamPosition};
		mutable StreamPosition m_columnEnd{InvalidStreamPosition};
};

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -9,6 +9,7 @@
#include <vector>

#include <tokenize/Location.h>
#include <tokenize/StreamPosition.h>
#include <tokenize/TokenizerException.h>

namespace tokenize
@@ -23,12 +24,9 @@ namespace tokenize
class Stream
{
	public:
		using Position = size_t;
		static const Position InvalidPosition;

		struct Delimiter
		{
			Position position;
			StreamPosition position;
			std::string sectionName;
		};

@@ -46,15 +44,17 @@ class Stream
		void read(const std::experimental::filesystem::path &path);

		void reset();
		void seek(Position position);
		Position position() const;
		Location location() const;
		void seek(StreamPosition position);
		StreamPosition position() const;

		char currentCharacter() const
		const std::vector<Delimiter> &delimiters() const
		{
			assert(m_position < m_stream.size());
			return m_delimiters;
		}

			// TODO: check if this should be secured by check()
		char currentCharacter()
		{
			check();
			return m_stream[m_position];
		}

@@ -69,15 +69,15 @@ class Stream
			return m_position >= m_stream.size();
		}

		void check() const
		void check()
		{
			if (atEnd())
				throw TokenizerException(location(), "reading past end of file");
				throw TokenizerException(*this, "reading past end of file");
		}

	protected:
		std::string m_stream;
		mutable Position m_position;
		mutable StreamPosition m_position{0};

		std::vector<Delimiter> m_delimiters;
};

lib/tokenize/include/tokenize/StreamPosition.h (new file, 23 lines)
@@ -0,0 +1,23 @@
#ifndef __TOKENIZE__STREAM_POSITION_H
#define __TOKENIZE__STREAM_POSITION_H

#include <cstddef>
#include <limits>

namespace tokenize
{

////////////////////////////////////////////////////////////////////////////////////////////////////
//
// StreamPosition
//
////////////////////////////////////////////////////////////////////////////////////////////////////

using StreamPosition = size_t;
static const StreamPosition InvalidStreamPosition{std::numeric_limits<StreamPosition>::max()};

////////////////////////////////////////////////////////////////////////////////////////////////////

}

#endif
@@ -189,7 +189,7 @@ void Tokenizer<TokenizerPolicy>::expect(const Type &expectedValue)
	std::stringstream message;
	message << "unexpected value, expected “" << expectedValue << "”";

	throw TokenizerException(location(), message.str());
	throw TokenizerException(*this, message.str());
}

////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -208,7 +208,7 @@ std::string Tokenizer<TokenizerPolicy>::getIdentifier()
		if (!TokenizerPolicy::isIdentifierCharacter(character))
		{
			if (value.empty())
				throw TokenizerException(location(), "could not parse identifier");
				throw TokenizerException(*this, "could not parse identifier");

			return value;
		}
@@ -406,7 +406,7 @@ uint64_t Tokenizer<TokenizerPolicy>::getIntegerBody()
	check();

	if (!std::isdigit(currentCharacter()))
		throw TokenizerException(location(), "could not read integer value");
		throw TokenizerException(*this, "could not read integer value");

	uint64_t value = 0;

@@ -448,7 +448,7 @@ uint64_t Tokenizer<TokenizerPolicy>::getImpl(Tag<uint64_t>)
	skipWhiteSpace();

	if (currentCharacter() == '-')
		throw TokenizerException(location(), "expected unsigned integer, got signed one");
		throw TokenizerException(*this, "expected unsigned integer, got signed one");

	return getIntegerBody();
}
@@ -482,7 +482,7 @@ bool Tokenizer<TokenizerPolicy>::getImpl(Tag<bool>)
	if (testAndSkip<char>('1'))
		return true;

	throw TokenizerException(location(), "could not read Boolean value");
	throw TokenizerException(*this, "could not read Boolean value");
}

////////////////////////////////////////////////////////////////////////////////////////////////////

@@ -30,10 +30,7 @@ class TokenizerException: public std::exception

		explicit TokenizerException(const Location &location, const std::string &message)
		:	m_location{location},
			m_message{message},
			// TODO: refactor
			m_plainMessage{std::string(m_location.sectionStart) + ":" + std::to_string(m_location.rowStart)
				+ ":" + std::to_string(m_location.columnStart) + " " + m_message}
			m_message{message}
		{
		}

@@ -41,7 +38,7 @@ class TokenizerException: public std::exception

		const char *what() const noexcept
		{
			return m_plainMessage.c_str();
			return m_message.c_str();
		}

		const Location &location() const
@@ -57,7 +54,6 @@ class TokenizerException: public std::exception
	private:
		Location m_location;
		std::string m_message;
		std::string m_plainMessage;
};

////////////////////////////////////////////////////////////////////////////////////////////////////

lib/tokenize/src/tokenize/Location.cpp (new file, 150 lines)
@@ -0,0 +1,150 @@
#include <tokenize/Location.h>

#include <algorithm>

#include <tokenize/Stream.h>

namespace tokenize
{

////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Location
//
////////////////////////////////////////////////////////////////////////////////////////////////////

Location::Location(Stream &stream)
:	m_stream{stream},
	m_position{stream.position()}
{
}

////////////////////////////////////////////////////////////////////////////////////////////////////

Location::Location(Stream &stream, StreamPosition position)
:	m_stream{stream},
	m_position{position}
{
}

////////////////////////////////////////////////////////////////////////////////////////////////////

const char *Location::sectionStart() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_sectionStart;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

const char *Location::sectionEnd() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_sectionEnd;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

StreamPosition Location::rowStart() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_rowStart;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

StreamPosition Location::rowEnd() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_rowEnd;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

StreamPosition Location::columnStart() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_columnStart;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

StreamPosition Location::columnEnd() const
{
	if (!m_isInitialized)
		initializeLazily();

	return m_columnEnd;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

void Location::initializeLazily() const
{
	const auto previousPosition = m_stream.position();

	const auto &delimiters = m_stream.delimiters();

	// Find current section
	auto currentFile = std::find_if(delimiters.crbegin(), delimiters.crend(),
		[&](const auto &fileDelimiter)
		{
			return m_position >= fileDelimiter.position;
		});

	// If the tokenizer is at the end of the stream, still count from the beginning of the last section
	if (currentFile == delimiters.crend())
		currentFile = delimiters.crbegin();

	// Go back to beginning of section
	m_stream.seek(currentFile->position);

	StreamPosition row{1};
	StreamPosition column{1};

	// Compute the location character by character
	while (true)
	{
		if (m_stream.atEnd())
			break;
		else if (m_stream.position() >= m_position)
			break;

		const auto character = m_stream.currentCharacter();

		if (character == '\n')
		{
			row++;
			column = 1;
		}
		else if (std::isblank(character) || std::isprint(character))
			column++;

		m_stream.advance();
	}

	m_sectionStart = currentFile->sectionName.c_str();
	m_sectionEnd = currentFile->sectionName.c_str();
	m_rowStart = row;
	m_rowEnd = row;
	m_columnStart = column;
	m_columnEnd = column;

	m_isInitialized = true;

	m_stream.seek(previousPosition);
}

////////////////////////////////////////////////////////////////////////////////////////////////////

}
@@ -1,6 +1,5 @@
 | 
			
		||||
#include <tokenize/Stream.h>
 | 
			
		||||
 | 
			
		||||
#include <algorithm>
 | 
			
		||||
#include <fstream>
 | 
			
		||||
 | 
			
		||||
namespace tokenize
 | 
			
		||||
@@ -12,12 +11,7 @@ namespace tokenize
 | 
			
		||||
//
 | 
			
		||||
////////////////////////////////////////////////////////////////////////////////////////////////////
 | 
			
		||||
 | 
			
		||||
const Stream::Position Stream::InvalidPosition{std::numeric_limits<Position>::max()};
 | 
			
		||||
 | 
			
		||||
////////////////////////////////////////////////////////////////////////////////////////////////////
 | 
			
		||||
 | 
			
		||||
Stream::Stream()
 | 
			
		||||
:	m_position{0}
 | 
			
		||||
{
 | 
			
		||||
	std::setlocale(LC_NUMERIC, "C");
 | 
			
		||||
}
 | 
			
		||||
@@ -34,18 +28,16 @@ Stream::Stream(std::string streamName, std::istream &istream)
 | 
			
		||||
void Stream::read(std::string streamName, std::istream &istream)
 | 
			
		||||
{
 | 
			
		||||
	// Store position of new section
 | 
			
		||||
	const auto position = m_stream.size();
 | 
			
		||||
	m_delimiters.push_back({m_stream.size(), streamName});
 | 
			
		||||
 | 
			
		||||
	m_delimiters.push_back({position, streamName});
 | 
			
		||||
 | 
			
		||||
	istream.seekg(0, std::ios::end);
 | 
			
		||||
	/*istream.seekg(0, std::ios::end);
 | 
			
		||||
	const auto streamSize = istream.tellg();
 | 
			
		||||
	istream.seekg(0, std::ios::beg);
 | 
			
		||||
 | 
			
		||||
	const auto startPosition = m_stream.size();
 | 
			
		||||
 | 
			
		||||
	m_stream.resize(m_stream.size() + streamSize);
 | 
			
		||||
	std::copy(std::istreambuf_iterator<char>(istream), std::istreambuf_iterator<char>(), m_stream.begin() + startPosition);
 | 
			
		||||
	m_stream.resize(m_stream.size() + streamSize);*/
 | 
			
		||||
	std::copy(std::istreambuf_iterator<char>(istream), std::istreambuf_iterator<char>(), std::back_inserter(m_stream));
 | 
			
		||||
}
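
In the hunk above, the old code determined the input size up front (seekg to the end, tellg, seekg back), resized the internal buffer, and copied into it at an offset; the new code appends the whole istream through std::back_inserter and lets the buffer grow on demand. A small stand-alone sketch of that appending pattern follows; the stringstream input and the buffer vector are illustrative stand-ins, not the tokenizer's actual members.

#include <algorithm>
#include <iostream>
#include <iterator>
#include <sstream>
#include <vector>

int main()
{
	std::stringstream input("(define (domain test))");

	// Stand-in for the tokenizer's internal character stream
	std::vector<char> buffer;

	// std::back_inserter grows the buffer as characters arrive, so the input
	// size never has to be queried with seekg/tellg beforehand
	std::copy(std::istreambuf_iterator<char>(input), std::istreambuf_iterator<char>(),
		std::back_inserter(buffer));

	std::cout << buffer.size() << " characters read\n";
}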

////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -69,65 +61,18 @@ void Stream::reset()

////////////////////////////////////////////////////////////////////////////////////////////////////

void Stream::seek(Position position)
void Stream::seek(StreamPosition position)
{
	m_position = position;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

typename Stream::Position Stream::position() const
StreamPosition Stream::position() const
{
	return m_position;
}

////////////////////////////////////////////////////////////////////////////////////////////////////

Location Stream::location() const
{
	const auto currentPosition = position();

	// Find current section
	auto currentFile = std::find_if(m_delimiters.crbegin(), m_delimiters.crend(),
		[&](const auto &fileDelimiter)
		{
			return currentPosition >= fileDelimiter.position;
		});

	// If the tokenizer is at the end of the stream, still count from the beginning of the last section
	if (currentFile == m_delimiters.crend())
		currentFile = m_delimiters.crbegin();

	// Go back to beginning of section
	m_position = currentFile->position;

	size_t row = 1;
	size_t column = 1;

	// Compute the location character by character
	while (true)
	{
		if (currentPosition >= m_stream.size() && atEnd())
			break;
		else if (currentPosition < m_stream.size() && position() >= currentPosition)
			break;

		const auto character = currentCharacter();

		if (character == '\n')
		{
			row++;
			column = 1;
		}
		else if (std::isblank(character) || std::isprint(character))
			column++;

		m_position++;
	}

	return {currentFile->sectionName.c_str(), currentFile->sectionName.c_str(), row, row, column, column};
}

////////////////////////////////////////////////////////////////////////////////////////////////////

}

@@ -72,7 +72,7 @@ TEST_CASE("[tokenizer] While tokenizing, the cursor position is as expected", "[
	std::stringstream s("  identifier  5   \n-51\t 0 1");
	tokenize::Tokenizer<> p("input", s);

	tokenize::Tokenizer<>::Position pos;
	tokenize::StreamPosition pos;

	pos = p.position();
	REQUIRE(p.testAndReturn<std::string>("error") == false);
@@ -181,86 +181,108 @@ TEST_CASE("[tokenizer] While tokenizing, the cursor location is as expcected", "

	const auto startPosition = p.position();

	tokenize::Location l;

	l = p.location();
	REQUIRE(l.rowStart == 1u);
	REQUIRE(l.columnStart == 1u);
	REQUIRE(p.currentCharacter() == '1');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 1u);
		REQUIRE(l.columnStart() == 1u);
		REQUIRE(p.currentCharacter() == '1');
	}

	REQUIRE_NOTHROW(p.advance());

	l = p.location();
	REQUIRE(l.rowStart == 1u);
	REQUIRE(l.columnStart == 2u);
	REQUIRE(p.currentCharacter() == '2');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 1u);
		REQUIRE(l.columnStart() == 2u);
		REQUIRE(p.currentCharacter() == '2');
	}

	REQUIRE_NOTHROW(p.advance());

	l = p.location();
	REQUIRE(l.rowStart == 1u);
	REQUIRE(l.columnStart == 3u);
	REQUIRE(p.currentCharacter() == '3');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 1u);
		REQUIRE(l.columnStart() == 3u);
		REQUIRE(p.currentCharacter() == '3');
	}

	REQUIRE_NOTHROW(p.advance());

	l = p.location();
	REQUIRE(l.rowStart == 1u);
	REQUIRE(l.columnStart == 4u);
	REQUIRE(p.currentCharacter() == ' ');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 1u);
		REQUIRE(l.columnStart() == 4u);
		REQUIRE(p.currentCharacter() == ' ');
	}

	REQUIRE_NOTHROW(p.advance());

	l = p.location();
	REQUIRE(l.rowStart == 1u);
	REQUIRE(l.columnStart == 5u);
	REQUIRE(p.currentCharacter() == '\n');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 1u);
		REQUIRE(l.columnStart() == 5u);
		REQUIRE(p.currentCharacter() == '\n');
	}

	REQUIRE_NOTHROW(p.advance());

	l = p.location();
	REQUIRE(l.rowStart == 2u);
	REQUIRE(l.columnStart == 1u);
	REQUIRE(p.currentCharacter() == '4');
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 2u);
		REQUIRE(l.columnStart() == 1u);
		REQUIRE(p.currentCharacter() == '4');
	}

	REQUIRE_NOTHROW(p.advance());

	REQUIRE_NOTHROW(p.expect<std::string>("test1"));

	l = p.location();
	REQUIRE(l.rowStart == 3u);
	REQUIRE(l.columnStart == 6u);
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 3u);
		REQUIRE(l.columnStart() == 6u);
	}

	REQUIRE_NOTHROW(p.expect<std::string>("test2"));

	l = p.location();
	REQUIRE(l.rowStart == 4u);
	REQUIRE(l.columnStart == 7u);
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 4u);
		REQUIRE(l.columnStart() == 7u);
	}

	REQUIRE_NOTHROW(p.expect<std::string>("test3"));

	l = p.location();
	REQUIRE(l.rowStart == 5u);
	REQUIRE(l.columnStart == 6u);
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 5u);
		REQUIRE(l.columnStart() == 6u);
	}

	REQUIRE_NOTHROW(p.skipLine());

	l = p.location();
	REQUIRE(l.rowStart == 6u);
	REQUIRE(l.columnStart == 1u);
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 6u);
		REQUIRE(l.columnStart() == 1u);
	}

	REQUIRE_NOTHROW(p.skipLine());

	l = p.location();
	REQUIRE(l.rowStart == 7u);
	REQUIRE(l.columnStart == 1u);
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 7u);
		REQUIRE(l.columnStart() == 1u);
	}

	REQUIRE_NOTHROW(p.skipWhiteSpace());

	l = p.location();
	REQUIRE(l.rowStart == 10u);
	REQUIRE(l.columnStart == 1u);
	REQUIRE(p.atEnd());
	{
		auto l = tokenize::Location(p);
		REQUIRE(l.rowStart() == 10u);
		REQUIRE(l.columnStart() == 1u);
		REQUIRE(p.atEnd());
	}

	p.reset();
	REQUIRE(p.position() == startPosition);
@@ -289,19 +311,21 @@ TEST_CASE("[tokenizer] Comments are correctly removed", "[tokenizer]")

	p1.removeComments(";", "\n", false);

	tokenize::Location l;

	REQUIRE_NOTHROW(p1.expect<std::string>("test1"));

	l = p1.location();
	REQUIRE(l.rowStart == 2u);
	REQUIRE(l.columnStart == 6u);
	{
		auto l = tokenize::Location(p1);
		REQUIRE(l.rowStart() == 2u);
		REQUIRE(l.columnStart() == 6u);
	}

	REQUIRE_NOTHROW(p1.expect<std::string>("test2"));

	l = p1.location();
	REQUIRE(l.rowStart == 3u);
	REQUIRE(l.columnStart == 6u);
	{
		auto l = tokenize::Location(p1);
		REQUIRE(l.rowStart() == 3u);
		REQUIRE(l.columnStart() == 6u);
	}

	p1.skipWhiteSpace();