blob: 448490ad427a2286ca33837522d20a382cbdc573 [file] [log] [blame]
// Copyright 2017-2020 The Verible Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "common/text/token_info_test_util.h"
#include <vector>
#include "gtest/gtest.h"
#include "absl/strings/string_view.h"
#include "common/text/token_info.h"
#include "common/util/logging.h"
namespace verible {
namespace {
// Verifies that a default-constructed TokenInfoTestData holds no code text
// and no expected tokens.
TEST(TokenInfoTestDataTest, EmptyInitializer) {
  const TokenInfoTestData empty_data{};
  EXPECT_TRUE(empty_data.code.empty());
  EXPECT_TRUE(empty_data.expected_tokens.empty());
}
// Verifies construction from bare string literals: each literal becomes a
// token tagged kDontCare, and .code is the concatenation of all fragments.
TEST(TokenInfoTestDataTest, ConstructorStringLiteral) {
  const TokenInfoTestData test_data = {
      {"foo"},
      {"bar"},
  };
  EXPECT_EQ(test_data.code, "foobar");
  // ASSERT (not EXPECT): front()/back() below would be undefined behavior on
  // an empty vector if construction ever produced the wrong token count.
  ASSERT_EQ(test_data.expected_tokens.size(), 2);
  const auto& first(test_data.expected_tokens.front());
  EXPECT_EQ(first.text(), "foo");
  // Plain string fragments carry no token enumeration.
  EXPECT_EQ(first.token_enum(), ExpectedTokenInfo::kDontCare);
  EXPECT_EQ(first.left(test_data.code), 0);
  EXPECT_EQ(first.right(test_data.code), 3);
  const auto& second(test_data.expected_tokens.back());
  EXPECT_EQ(second.text(), "bar");
  EXPECT_EQ(second.token_enum(), ExpectedTokenInfo::kDontCare);
  // Offsets are relative to the concatenated code string.
  EXPECT_EQ(second.left(test_data.code), 3);
  EXPECT_EQ(second.right(test_data.code), 6);
}
// Verifies the single-character constructor: both the token enumeration and
// the token text are derived from the same char.
TEST(ExpectedTokenInfoTest, ConstructorCharEnum) {
  ExpectedTokenInfo semicolon(';');
  EXPECT_EQ(semicolon.token_enum(), ';');
  const auto text = semicolon.text();
  EXPECT_EQ(text, ";");
  // Locations measured against the token's own text span.
  EXPECT_EQ(semicolon.left(text), 0);
  EXPECT_EQ(semicolon.right(text), 1);
}
// Verifies construction from single characters: each char becomes a token
// whose enum equals the character itself.
TEST(TokenInfoTestDataTest, ConstructorCharEnum) {
  const TokenInfoTestData test_data = {
      {'('},
      {')'},
  };
  EXPECT_EQ(test_data.code, "()");
  // ASSERT (not EXPECT): front()/back() below would be undefined behavior on
  // an empty vector if construction ever produced the wrong token count.
  ASSERT_EQ(test_data.expected_tokens.size(), 2);
  const auto& first(test_data.expected_tokens.front());
  EXPECT_EQ(first.text(), "(");
  EXPECT_EQ(first.token_enum(), '(');
  EXPECT_EQ(first.left(test_data.code), 0);
  EXPECT_EQ(first.right(test_data.code), 1);
  const auto& second(test_data.expected_tokens.back());
  EXPECT_EQ(second.text(), ")");
  EXPECT_EQ(second.token_enum(), ')');
  EXPECT_EQ(second.left(test_data.code), 1);
  EXPECT_EQ(second.right(test_data.code), 2);
}
// Verifies the (enum, text) constructor: both fields are stored as given.
TEST(ExpectedTokenInfoTest, ConstructorEnumAndStringLiteral) {
  const ExpectedTokenInfo tagged(11, "f00");
  EXPECT_EQ(tagged.text(), "f00");
  EXPECT_EQ(tagged.token_enum(), 11);
}
// Verifies construction from (enum, text) pairs: enums are preserved and
// token locations follow the concatenation order.
TEST(TokenInfoTestDataTest, ConstructorEnumAndStringLiteral) {
  const TokenInfoTestData test_data = {
      {8, "foo"},
      {7, "bar"},
  };
  EXPECT_EQ(test_data.code, "foobar");
  // ASSERT (not EXPECT): front()/back() below would be undefined behavior on
  // an empty vector if construction ever produced the wrong token count.
  ASSERT_EQ(test_data.expected_tokens.size(), 2);
  const auto& first(test_data.expected_tokens.front());
  EXPECT_EQ(first.text(), "foo");
  EXPECT_EQ(first.token_enum(), 8);
  EXPECT_EQ(first.left(test_data.code), 0);
  EXPECT_EQ(first.right(test_data.code), 3);
  const auto& second(test_data.expected_tokens.back());
  EXPECT_EQ(second.text(), "bar");
  EXPECT_EQ(second.token_enum(), 7);
  EXPECT_EQ(second.left(test_data.code), 3);
  EXPECT_EQ(second.right(test_data.code), 6);
}
// This example best represents a typical lexer test case that verifies a
// sequence of tokens. The token enumeration values would realistically be
// written as enumeration values generated by yacc/bison.
TEST(TokenInfoTestDataTest, ConstructorHeterogeneousInitializer) {
  const TokenInfoTestData test_data = {{8, "foo"}, " ", '(', {7, "bar"}, ')'};
  EXPECT_EQ(test_data.code, "foo (bar)");
  const auto& tokens = test_data.expected_tokens;
  ASSERT_EQ(tokens.size(), 5);
  // Expected (text, enum, left, right) for each token, in code order.
  struct Expectation {
    absl::string_view text;
    int token_enum;
    int left;
    int right;
  };
  const Expectation kExpected[] = {
      {"foo", 8, 0, 3},
      {" ", ExpectedTokenInfo::kDontCare, 3, 4},
      {"(", '(', 4, 5},
      {"bar", 7, 5, 8},
      {")", ')', 8, 9},
  };
  for (size_t i = 0; i < tokens.size(); ++i) {
    EXPECT_EQ(tokens[i].text(), kExpected[i].text) << "token " << i;
    EXPECT_EQ(tokens[i].token_enum(), kExpected[i].token_enum) << "token " << i;
    EXPECT_EQ(tokens[i].left(test_data.code), kExpected[i].left)
        << "token " << i;
    EXPECT_EQ(tokens[i].right(test_data.code), kExpected[i].right)
        << "token " << i;
  }
}
// Verifies that FindImportantTokens() filters out the don't-care fragments
// (whitespace, comments) and keeps only the enum-tagged and char tokens.
// The token enumeration values would realistically be written as enumeration
// values generated by yacc/bison.
TEST(TokenInfoTestDataTest, FindImportantTokensTest) {
  const TokenInfoTestData test_data = {
      {8, "foo"}, " ", '(', {7, "bar"}, " ", "/* comment */", ')',
  };
  EXPECT_EQ(test_data.code, "foo (bar /* comment */)");
  const auto& expected_tokens = test_data.expected_tokens;
  EXPECT_EQ(expected_tokens.size(), 7);
  const auto key_tokens = test_data.FindImportantTokens();
  // ASSERT (not EXPECT): indexing key_tokens[0..3] below would be
  // out-of-bounds undefined behavior if the filtered count were wrong.
  ASSERT_EQ(key_tokens.size(), 4);
  EXPECT_EQ(key_tokens[0].text(), "foo");
  EXPECT_EQ(key_tokens[0].token_enum(), 8);
  EXPECT_EQ(key_tokens[0].left(test_data.code), 0);
  EXPECT_EQ(key_tokens[0].right(test_data.code), 3);
  EXPECT_EQ(key_tokens[1].text(), "(");
  EXPECT_EQ(key_tokens[1].token_enum(), '(');
  EXPECT_EQ(key_tokens[1].left(test_data.code), 4);
  EXPECT_EQ(key_tokens[1].right(test_data.code), 5);
  EXPECT_EQ(key_tokens[2].text(), "bar");
  EXPECT_EQ(key_tokens[2].token_enum(), 7);
  EXPECT_EQ(key_tokens[2].left(test_data.code), 5);
  EXPECT_EQ(key_tokens[2].right(test_data.code), 8);
  // The ')' location accounts for the skipped whitespace and comment text.
  EXPECT_EQ(key_tokens[3].text(), ")");
  EXPECT_EQ(key_tokens[3].token_enum(), ')');
  EXPECT_EQ(key_tokens[3].left(test_data.code), 22);
  EXPECT_EQ(key_tokens[3].right(test_data.code), 23);
}
// Verifies that rebasing an empty token set onto an (identical) empty string
// is a no-op.
TEST(TokenInfoTestDataTest, RebaseToCodeCopyEmpty) {
  const TokenInfoTestData empty_data{};
  constexpr absl::string_view no_text("");
  EXPECT_EQ(empty_data.code, no_text);
  std::vector<TokenInfo> rebased;
  empty_data.RebaseToCodeCopy(&rebased, no_text);
  EXPECT_TRUE(rebased.empty());
}
// Verifies that RebaseToCodeCopy() re-anchors token locations onto a
// separate, identical copy of the code string.
TEST(TokenInfoTestDataTest, RebaseToCodeCopyMoved) {
  const TokenInfoTestData test_data{
      {3, "text"},
      {4, "book"},
  };
  constexpr absl::string_view other_text("textbook");  // separate copy
  std::vector<TokenInfo> tokens = test_data.expected_tokens;  // copy
  EXPECT_EQ(test_data.code, other_text);
  // ASSERT (not EXPECT): tokens[0]/tokens[1] below would be out-of-bounds
  // undefined behavior if the token count were wrong.
  ASSERT_EQ(tokens.size(), 2);
  EXPECT_EQ(tokens[0].left(test_data.code), 0);
  EXPECT_EQ(tokens[0].right(test_data.code), 4);
  EXPECT_EQ(tokens[1].left(test_data.code), 4);
  EXPECT_EQ(tokens[1].right(test_data.code), 8);
  test_data.RebaseToCodeCopy(&tokens, other_text);
  // Note: expected locations are now based off of other_text.
  EXPECT_EQ(tokens[0].text(), "text");
  EXPECT_EQ(tokens[0].token_enum(), 3);
  EXPECT_EQ(tokens[0].left(other_text), 0);
  EXPECT_EQ(tokens[0].right(other_text), 4);
  EXPECT_EQ(tokens[1].text(), "book");
  EXPECT_EQ(tokens[1].token_enum(), 4);
  EXPECT_EQ(tokens[1].left(other_text), 4);
  EXPECT_EQ(tokens[1].right(other_text), 8);
}
// Verifies the FindImportantTokens(string_view) overload: the returned
// tokens are already rebased onto the caller-provided copy of the code.
TEST(TokenInfoTestDataTest, FindImportantTokensRebased) {
  const TokenInfoTestData test_data{
      {3, "text"},
      {4, "book"},
  };
  constexpr absl::string_view other_text("textbook");  // separate copy
  std::vector<TokenInfo> tokens = test_data.FindImportantTokens(other_text);
  EXPECT_EQ(test_data.code, other_text);
  // ASSERT (not EXPECT): tokens[0]/tokens[1] below would be out-of-bounds
  // undefined behavior if the token count were wrong.
  ASSERT_EQ(tokens.size(), 2);
  // Note: expected locations are now based off of other_text.
  EXPECT_EQ(tokens[0].text(), "text");
  EXPECT_EQ(tokens[0].token_enum(), 3);
  EXPECT_EQ(tokens[0].left(other_text), 0);
  EXPECT_EQ(tokens[0].right(other_text), 4);
  EXPECT_EQ(tokens[1].text(), "book");
  EXPECT_EQ(tokens[1].token_enum(), 4);
  EXPECT_EQ(tokens[1].left(other_text), 4);
  EXPECT_EQ(tokens[1].right(other_text), 8);
}
} // namespace
} // namespace verible