Refactor compile-related tests to share construction. (#4396)
Note in particular that this fixes an issue where a single SharedValueStores had
been shared across files, when value stores should be per-file. This was only
visible when doing multiple compilations in a single test, which was rare
before.
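
For illustration, a minimal sketch of the per-file construction the shared helper enables. This is not the actual toolchain/testing/compile_helper.h (its diff is not rendered on this page): the class name, the file_index_ counter, the generated filenames, and the include paths are placeholders, while SharedValueStores, SourceBuffer::MakeFromFile, Lex::Lex, and ConsoleDiagnosticConsumer follow the usage visible in the tree_test.cpp diff below.

// Sketch only -- not the real Testing::CompileHelper. The key point is that
// each compiled file gets its own SharedValueStores, kept alive in a
// forward_list, instead of one fixture-wide value store reused across files.
#include <forward_list>
#include <string>

#include "common/check.h"  // include paths are approximate
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/VirtualFileSystem.h"
#include "toolchain/base/value_store.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/lex/lex.h"
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/source/source_buffer.h"

namespace Carbon::Testing {

class CompileHelperSketch {
 public:
  auto GetTokenizedBuffer(llvm::StringRef text) -> Lex::TokenizedBuffer& {
    // Per-file value stores: a fresh SharedValueStores for every compilation.
    value_store_storage_.emplace_front();
    SharedValueStores& value_stores = value_store_storage_.front();

    // A unique filename per call so repeated compilations don't collide.
    std::string filename = "test" + std::to_string(++file_index_) + ".carbon";
    CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
                             llvm::MemoryBuffer::getMemBuffer(text)));
    source_storage_.push_front(
        std::move(*SourceBuffer::MakeFromFile(fs_, filename, consumer_)));

    token_storage_.push_front(
        Lex::Lex(value_stores, source_storage_.front(), consumer_));
    return token_storage_.front();
  }

 private:
  int file_index_ = 0;
  llvm::vfs::InMemoryFileSystem fs_;
  std::forward_list<SharedValueStores> value_store_storage_;
  std::forward_list<SourceBuffer> source_storage_;
  std::forward_list<Lex::TokenizedBuffer> token_storage_;
  DiagnosticConsumer& consumer_ = ConsoleDiagnosticConsumer();
};

}  // namespace Carbon::Testing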

This also moves these tests into the Testing namespace. My memory of the
various namespacing changes is that we'd generally agreed to have tests in
Testing so that we'd see SemIR:: and similar spelled out, the same as we would
in much of the implementation.
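
Purely as an illustration of that convention (this is not code from the commit; the test name and body are invented), a test defined inside Carbon::Testing can use CompileHelper unqualified while Lex:: and similar names keep their prefixes:

#include <gtest/gtest.h>

#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/testing/compile_helper.h"

namespace Carbon::Testing {
namespace {

class ExampleTest : public ::testing::Test {
 protected:
  // Unqualified here because the test lives in Carbon::Testing.
  CompileHelper compile_helper_;
};

TEST_F(ExampleTest, LexesWithoutErrors) {
  // Lex:: stays explicitly qualified, as in much of the implementation.
  Lex::TokenizedBuffer& tokens = compile_helper_.GetTokenizedBuffer("fn F();");
  EXPECT_FALSE(tokens.has_errors());
}

}  // namespace
}  // namespace Carbon::Testing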
jonmeow authored Oct 10, 2024
1 parent b1014bf commit 0f35025
Showing 8 changed files with 452 additions and 372 deletions.
1 change: 1 addition & 0 deletions toolchain/lex/BUILD
@@ -257,6 +257,7 @@ cc_test(
"//toolchain/base:value_store",
"//toolchain/diagnostics:diagnostic_emitter",
"//toolchain/diagnostics:mocks",
"//toolchain/testing:compile_helper",
"//toolchain/testing:yaml_test_helpers",
"@googletest//:gtest",
"@llvm-project//llvm:Support",
456 changes: 224 additions & 232 deletions toolchain/lex/tokenized_buffer_test.cpp

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions toolchain/parse/BUILD
@@ -45,6 +45,7 @@ cc_test(
"//toolchain/diagnostics:mocks",
"//toolchain/lex",
"//toolchain/lex:tokenized_buffer",
"//toolchain/testing:compile_helper",
"@googletest//:gtest",
],
)
@@ -139,6 +140,7 @@ cc_test(
"//toolchain/diagnostics:mocks",
"//toolchain/lex",
"//toolchain/lex:tokenized_buffer",
"//toolchain/testing:compile_helper",
"//toolchain/testing:yaml_test_helpers",
"@googletest//:gtest",
"@llvm-project//llvm:Support",
53 changes: 18 additions & 35 deletions toolchain/parse/tree_test.cpp
@@ -17,6 +17,7 @@
#include "toolchain/lex/tokenized_buffer.h"
#include "toolchain/parse/parse.h"
#include "toolchain/parse/tree_and_subtrees.h"
#include "toolchain/testing/compile_helper.h"
#include "toolchain/testing/yaml_test_helpers.h"

namespace Carbon::Parse {
@@ -30,38 +31,19 @@ namespace Yaml = ::Carbon::Testing::Yaml;

class TreeTest : public ::testing::Test {
protected:
auto GetSourceBuffer(llvm::StringRef t) -> SourceBuffer& {
CARBON_CHECK(fs_.addFile("test.carbon", /*ModificationTime=*/0,
llvm::MemoryBuffer::getMemBuffer(t)));
source_storage_.push_front(
std::move(*SourceBuffer::MakeFromFile(fs_, "test.carbon", consumer_)));
return source_storage_.front();
}

auto GetTokenizedBuffer(llvm::StringRef t) -> Lex::TokenizedBuffer& {
token_storage_.push_front(
Lex::Lex(value_stores_, GetSourceBuffer(t), consumer_));
return token_storage_.front();
}

SharedValueStores value_stores_;
llvm::vfs::InMemoryFileSystem fs_;
std::forward_list<SourceBuffer> source_storage_;
std::forward_list<Lex::TokenizedBuffer> token_storage_;
DiagnosticConsumer& consumer_ = ConsoleDiagnosticConsumer();
Testing::CompileHelper compile_helper_;
};

TEST_F(TreeTest, IsValid) {
Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("");
Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
Tree& tree = compile_helper_.GetTree("");
EXPECT_TRUE((*tree.postorder().begin()).is_valid());
}

TEST_F(TreeTest, AsAndTryAs) {
Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
auto [tokens, tree_and_subtrees] =
compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
const auto& tree = tree_and_subtrees.tree();
ASSERT_FALSE(tree.has_errors());
TreeAndSubtrees tree_and_subtrees(tokens, tree);
auto it = tree_and_subtrees.roots().begin();
// A FileEnd node, so won't match.
NodeId n = *it;
@@ -105,11 +87,11 @@ TEST_F(TreeTest, AsAndTryAs) {
}

TEST_F(TreeTest, PrintPostorderAsYAML) {
Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
EXPECT_FALSE(tree.has_errors());
auto [tokens, tree_and_subtrees] =
compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
EXPECT_FALSE(tree_and_subtrees.tree().has_errors());
TestRawOstream print_stream;
tree.Print(print_stream);
tree_and_subtrees.tree().Print(print_stream);

auto file = Yaml::Sequence(ElementsAre(
Yaml::Mapping(ElementsAre(Pair("kind", "FileStart"), Pair("text", ""))),
@@ -126,17 +108,17 @@ TEST_F(TreeTest, PrintPostorderAsYAML) {
Yaml::Mapping(ElementsAre(Pair("kind", "FileEnd"), Pair("text", "")))));

auto root = Yaml::Sequence(ElementsAre(Yaml::Mapping(
ElementsAre(Pair("filename", "test.carbon"), Pair("parse_tree", file)))));
ElementsAre(Pair("filename", tokens.source().filename().str()),
Pair("parse_tree", file)))));

EXPECT_THAT(Yaml::Value::FromText(print_stream.TakeStr()),
IsYaml(ElementsAre(root)));
}

TEST_F(TreeTest, PrintPreorderAsYAML) {
Lex::TokenizedBuffer& tokens = GetTokenizedBuffer("fn F();");
Tree tree = Parse(tokens, consumer_, /*vlog_stream=*/nullptr);
EXPECT_FALSE(tree.has_errors());
TreeAndSubtrees tree_and_subtrees(tokens, tree);
auto [tokens, tree_and_subtrees] =
compile_helper_.GetTokenizedBufferWithTreeAndSubtrees("fn F();");
EXPECT_FALSE(tree_and_subtrees.tree().has_errors());
TestRawOstream print_stream;
tree_and_subtrees.PrintPreorder(print_stream);

@@ -167,7 +149,8 @@ TEST_F(TreeTest, PrintPreorderAsYAML) {
Pair("kind", "FileEnd"), Pair("text", "")))));

auto root = Yaml::Sequence(ElementsAre(Yaml::Mapping(
ElementsAre(Pair("filename", "test.carbon"), Pair("parse_tree", file)))));
ElementsAre(Pair("filename", tokens.source().filename().str()),
Pair("parse_tree", file)))));

EXPECT_THAT(Yaml::Value::FromText(print_stream.TakeStr()),
IsYaml(ElementsAre(root)));
@@ -178,7 +161,7 @@ TEST_F(TreeTest, HighRecursion) {
code.append(10000, '(');
code.append(10000, ')');
code += "; }";
Lex::TokenizedBuffer& tokens = GetTokenizedBuffer(code);
Lex::TokenizedBuffer& tokens = compile_helper_.GetTokenizedBuffer(code);
ASSERT_FALSE(tokens.has_errors());
Testing::MockDiagnosticConsumer consumer;
Tree tree = Parse(tokens, consumer, /*vlog_stream=*/nullptr);