-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathShardedConcurrentLRUCacheTests.cpp
111 lines (102 loc) · 3.42 KB
/
ShardedConcurrentLRUCacheTests.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
// Copyright 2009-present Pinterest. All Rights Reserved.
//
// @author shu ([email protected])
//
#include <atomic>
#include <string>
#include <thread>
#include <vector>
#include "common/concurrent_lru_cache.h"
#include "glog/logging.h"
#include "gtest/gtest.h"
using common::ConcurrentLRUCache;
using std::string;
TEST(ConcurrentLRUCache, Basics) {
  // Single-threaded smoke test, no eviction: a stored key is returned
  // intact, and an absent key yields nullptr.
  ConcurrentLRUCache<string, string> cache(2, 5);
  cache.put("a", std::make_shared<string>("a"));
  EXPECT_EQ(*cache.get("a"), "a");
  EXPECT_EQ(cache.get("b"), nullptr);
}
TEST(ConcurrentLRUCache, Eviction) {
  // Single-threaded eviction: after inserting three keys into a cache
  // constructed with (1, 2), the oldest key "a" is gone while "b" and "c"
  // remain.
  ConcurrentLRUCache<string, string> cache(1, 2);
  auto held = std::make_shared<string>("a");
  cache.put("a", held);
  for (const char* key : {"b", "c"}) {
    cache.put(key, std::make_shared<string>(key));
  }
  EXPECT_EQ(cache.get("a"), nullptr);
  EXPECT_EQ(*cache.get("b"), "b");
  EXPECT_EQ(*cache.get("c"), "c");
  // The caller's shared_ptr keeps the evicted value alive and unchanged.
  EXPECT_EQ(*held, "a");
}
TEST(ConcurrentLRUCache, StressNoEvict) {
  // 4 threads (the previous comment said 10, but the loop has always
  // spawned 4) hammer the cache with the same 4 keys. The test asserts
  // that every get issued right after the puts hits, i.e. this working
  // set never triggers an eviction.
  ConcurrentLRUCache<string, string> cache =
      ConcurrentLRUCache<string, string>(2, 4);
  std::vector<std::thread> threads;
  threads.reserve(4);
  for (int i = 0; i < 4; i++) {
    // The cache is captured by reference; the joins below keep it alive
    // for the threads' entire lifetime.
    threads.emplace_back([&cache]() {
      for (int j = 0; j < 100000; j++) {
        cache.put("a", std::make_shared<string>("a"));
        cache.put("b", std::make_shared<string>("b"));
        cache.put("c", std::make_shared<string>("c"));
        cache.put("d", std::make_shared<string>("d"));
        EXPECT_EQ(*cache.get("a"), "a");
        EXPECT_EQ(*cache.get("b"), "b");
        EXPECT_EQ(*cache.get("c"), "c");
        EXPECT_EQ(*cache.get("d"), "d");
      }
    });
  }
  for (auto& thread : threads) {
    thread.join();
  }
}
// Performs one cache round-trip for `key`: on a hit, verifies the cached
// value equals the key and bumps *hit_count; on a miss, bumps *miss_count
// and inserts a value equal to the key. The counters are plain ints, so
// each thread must pass its own.
void simulate_get_put(ConcurrentLRUCache<string, string>* cache,
                      const string& key, int* miss_count, int* hit_count) {
  auto cached = cache->get(key);
  if (cached != nullptr) {
    EXPECT_EQ(*cached, key);
    ++(*hit_count);
    return;
  }
  ++(*miss_count);
  cache->put(key, std::make_shared<string>(key));
}
TEST(ConcurrentLRUCache, StressEvict) {
  // 10 threads cycle through 7 keys against a smaller cache, forcing
  // constant eviction. Each thread keeps private hit/miss counters; only
  // their sum (7 keys * 100000 iterations) is deterministic.
  ConcurrentLRUCache<string, string> cache =
      ConcurrentLRUCache<string, string>(2, 4);
  std::vector<std::thread> threads;
  threads.reserve(10);
  for (int i = 0; i < 10; i++) {
    threads.emplace_back([&cache]() {
      int hit_count = 0;
      int miss_count = 0;
      for (int j = 0; j < 100000; j++) {
        // BUGFIX: simulate_get_put takes (cache, key, miss_count,
        // hit_count); the counters used to be passed as (&hit_count,
        // &miss_count), which swapped the two log lines below. The sum
        // assertion hid the mix-up.
        for (const char* key : {"a", "b", "c", "d", "e", "f", "g"}) {
          simulate_get_put(&cache, key, &miss_count, &hit_count);
        }
      }
      LOG(INFO) << "Hitting count:" << hit_count;
      LOG(INFO) << "Missing count:" << miss_count;
      EXPECT_EQ(hit_count + miss_count, 700000);
    });
  }
  for (auto& thread : threads) {
    thread.join();
  }
}
// Test entry point. InitGoogleTest runs first so gtest can strip its own
// command-line flags before glog sees argv; logging is routed to stderr so
// the LOG(INFO) counters in the stress tests show up in test output.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;
  return RUN_ALL_TESTS();
}