Cache Algorithms and Implementations

Introduce some common CPU cache algorithms and their implementations.

1. LRU (Least Recently Used)

1.1 How it works

1.2 Implementation

The key is to have a linked list that simulates the cache storage and a hashtable to keep track of the data in the cache.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
#include <iostream>
#include <unordered_map>
#include <list>

class LRUCache {
private:
    // Each entry carries its key alongside the value so the key can be
    // found for map cleanup when the entry is evicted from the list tail.
    struct Node {
        int key;
        int value;
        Node(int k, int v) : key(k), value(v) {}
    };

    int capacity; // maximum number of entries; <= 0 means nothing is stored
    std::list<Node> cacheList; // front = most recently used, back = least recently used
    std::unordered_map<int, std::list<Node>::iterator> cacheMap; // key -> node position (O(1) lookup)

    // Mark `key` as most recently used. splice() relinks the node in O(1)
    // and does not invalidate the iterator stored in cacheMap.
    void moveToFront(int key) {
        auto nodeIt = cacheMap[key];
        cacheList.splice(cacheList.begin(), cacheList, nodeIt);
    }

public:
    // cap <= 0 yields a cache that rejects all insertions.
    LRUCache(int cap) : capacity(cap) {}

    // Returns the value stored under `key`, or -1 if absent.
    // A hit promotes the entry to most-recently-used.
    int get(int key) {
        if (cacheMap.find(key) == cacheMap.end()) {
            return -1; // Key not found
        }
        moveToFront(key);
        return cacheList.begin()->value;
    }

    // Inserts `key` or updates its value; either way the entry becomes the
    // most recently used. When the cache is full, the list tail (least
    // recently used) is evicted first.
    void put(int key, int value) {
        // BUG FIX: with capacity == 0 the original evicted from an empty
        // list (undefined behavior); reject non-positive capacities up front.
        if (capacity <= 0) {
            return;
        }

        auto mapIt = cacheMap.find(key);
        if (mapIt != cacheMap.end()) {
            mapIt->second->value = value; // reuse the found iterator, no second lookup
            moveToFront(key);
            return;
        }

        // Cast avoids a signed/unsigned comparison between size_t and int.
        if (static_cast<int>(cacheMap.size()) >= capacity) {
            int keyToRemove = cacheList.back().key;
            cacheList.pop_back();
            cacheMap.erase(keyToRemove);
        }

        cacheList.emplace_front(key, value);
        cacheMap[key] = cacheList.begin();
    }

    // Prints entries from most to least recently used.
    void display() const {
        for (const auto& node : cacheList) {
            std::cout << node.key << ": " << node.value << " ";
        }
        std::cout << std::endl;
    }
};

int main() {
LRUCache cache(3);

cache.put(1, 1);
cache.put(2, 2);
cache.put(3, 3);
std::cout << "Initial cache:" << std::endl;
cache.display();

std::cout << "Get 1: " << cache.get(1) << std::endl;
cache.display();

cache.put(4, 4);
std::cout << "Cache after adding 4:" << std::endl;
cache.display();

std::cout << "Get 2: " << cache.get(2) << std::endl;
cache.display();

cache.put(5, 5);
std::cout << "Cache after adding 5:" << std::endl;
cache.display();

return 0;
}
1
2
3
4
5
6
7
8
9
10
Initial cache:
3: 3 2: 2 1: 1
Get 1: 1
1: 1 3: 3 2: 2
Cache after adding 4:
4: 4 1: 1 3: 3
Get 2: -1
4: 4 1: 1 3: 3
Cache after adding 5:
5: 5 4: 4 1: 1

2. LFU (Least Frequently Used)

2.1 How it works

2.2 Implementation

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
#include <iostream>
#include <unordered_map>
#include <list>
#include <vector>
#include <algorithm>

class LFUCache {
private:
    // Cache entry: key, value, and current access frequency.
    struct Node {
        int key;
        int value;
        int freq;
        Node(int k, int v, int f) : key(k), value(v), freq(f) {}
    };

    int capacity; // maximum number of entries; <= 0 disables the cache
    int minFreq;  // smallest frequency currently present (0 when empty)
    std::unordered_map<int, std::list<Node>::iterator> keyNodeMap; // key -> node position
    // freq -> nodes at that frequency. Within a bucket the front is the most
    // recently used, so ties are broken by evicting the back (LRU among LFU).
    std::unordered_map<int, std::list<Node>> freqListMap;

public:
    LFUCache(int cap) : capacity(cap), minFreq(0) {}

    // Returns the value for `key` (or -1 if absent) and bumps its frequency,
    // moving the node from the freq bucket to the freq+1 bucket.
    int get(int key) {
        auto mapIt = keyNodeMap.find(key);
        if (mapIt == keyNodeMap.end()) {
            return -1; // Key not found
        }

        auto nodeIt = mapIt->second;
        int value = nodeIt->value;
        int freq = nodeIt->freq;

        // Detach from the current frequency bucket.
        freqListMap[freq].erase(nodeIt);
        if (freqListMap[freq].empty()) {
            freqListMap.erase(freq);
            // The only node at the minimum frequency moved up, so the
            // minimum frequency moves up with it.
            if (minFreq == freq) {
                minFreq++;
            }
        }

        // Re-insert at the front of the next bucket (most recently used
        // within that frequency).
        freq++;
        freqListMap[freq].emplace_front(key, value, freq);
        keyNodeMap[key] = freqListMap[freq].begin();

        return value;
    }

    // Inserts `key` or updates its value. When full, the least frequently
    // used entry (oldest one on ties) is evicted before inserting.
    void put(int key, int value) {
        // BUG FIX: the original only guarded capacity == 0; a negative
        // capacity still reached the empty-bucket eviction below (UB).
        if (capacity <= 0) return;

        auto mapIt = keyNodeMap.find(key);
        if (mapIt != keyNodeMap.end()) {
            // Existing key: overwrite the value, then reuse get() to do the
            // frequency bump and bucket move.
            mapIt->second->value = value;
            get(key);
            return;
        }

        // Cast avoids a signed/unsigned comparison between size_t and int.
        if (static_cast<int>(keyNodeMap.size()) >= capacity) {
            // Evict from the back of the minimum-frequency bucket; take a
            // reference instead of copying the node as the original did.
            auto& minList = freqListMap[minFreq];
            keyNodeMap.erase(minList.back().key);
            minList.pop_back();
            if (minList.empty()) {
                freqListMap.erase(minFreq);
            }
        }

        // A brand-new entry always starts at frequency 1.
        minFreq = 1;
        freqListMap[minFreq].emplace_front(key, value, minFreq);
        keyNodeMap[key] = freqListMap[minFreq].begin();
    }
};

int main() {
LFUCache cache(2);

cache.put(1, 1);
cache.put(2, 2);
std::cout << "Get 1: " << cache.get(1) << std::endl; // returns 1
cache.put(3, 3); // evicts key 2
std::cout << "Get 2: " << cache.get(2) << std::endl; // returns -1 (not found)
std::cout << "Get 3: " << cache.get(3) << std::endl; // returns 3
cache.put(4, 4); // evicts key 1
std::cout << "Get 1: " << cache.get(1) << std::endl; // returns -1 (not found)
std::cout << "Get 3: " << cache.get(3) << std::endl; // returns 3
std::cout << "Get 4: " << cache.get(4) << std::endl; // returns 4

return 0;
}
1
2
3
4
5
6
Get 1: 1
Get 2: -1
Get 3: 3
Get 1: -1
Get 3: 3
Get 4: 4

3. FIFO (First In First Out)

3.1 How it works

3.2 Implementation

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
#include <iostream>
#include <unordered_map>
#include <queue>

class FIFOCache {
private:
    int capacity; // maximum number of entries; <= 0 disables the cache
    std::queue<int> cacheQueue; // keys in insertion order; front = oldest
    std::unordered_map<int, int> cacheMap; // key -> value

public:
    FIFOCache(int cap) : capacity(cap) {}

    // Returns the value for `key`, or -1 if absent. In FIFO, lookups never
    // change the eviction order.
    int get(int key) {
        auto it = cacheMap.find(key);
        if (it == cacheMap.end()) {
            return -1; // Key not found
        }
        return it->second;
    }

    // Inserts `key` or updates its value. Only brand-new keys enter the
    // queue; when the cache is full the oldest inserted key is evicted.
    void put(int key, int value) {
        // BUG FIX: with capacity == 0 the original popped an empty queue
        // (undefined behavior); reject non-positive capacities up front.
        if (capacity <= 0) {
            return;
        }

        auto it = cacheMap.find(key);
        if (it != cacheMap.end()) {
            // BUG FIX: updating an existing key used to push a duplicate
            // queue entry and could trigger a spurious eviction; a later
            // eviction then left a stale key in the queue, which makes
            // display()'s cacheMap.at(key) throw. An update must only
            // overwrite the value.
            it->second = value;
            return;
        }

        // Cast avoids a signed/unsigned comparison between size_t and int.
        if (static_cast<int>(cacheMap.size()) >= capacity) {
            int keyToRemove = cacheQueue.front();
            cacheQueue.pop();
            cacheMap.erase(keyToRemove);
        }

        cacheQueue.push(key);
        cacheMap[key] = value;
    }

    // Prints entries from oldest to newest insertion. Iterates over a copy
    // because std::queue offers no traversal of its own.
    void display() const {
        std::queue<int> tempQueue = cacheQueue;
        while (!tempQueue.empty()) {
            int key = tempQueue.front();
            tempQueue.pop();
            std::cout << key << ": " << cacheMap.at(key) << " ";
        }
        std::cout << std::endl;
    }
};

int main() {
FIFOCache cache(3);

cache.put(1, 1);
cache.put(2, 2);
cache.put(3, 3);
std::cout << "Initial cache:" << std::endl;
cache.display();

std::cout << "Get 1: " << cache.get(1) << std::endl;
cache.display();

cache.put(4, 4);
std::cout << "Cache after adding 4:" << std::endl;
cache.display();

std::cout << "Get 2: " << cache.get(2) << std::endl;
cache.display();

cache.put(5, 5);
std::cout << "Cache after adding 5:" << std::endl;
cache.display();

return 0;
}
1
2
3
4
5
6
7
8
9
10
Initial cache:
1: 1 2: 2 3: 3
Get 1: 1
1: 1 2: 2 3: 3
Cache after adding 4:
2: 2 3: 3 4: 4
Get 2: 2
2: 2 3: 3 4: 4
Cache after adding 5:
3: 3 4: 4 5: 5

References

Author

Joe Chu

Posted on

2024-07-06

Updated on

2024-07-07

Licensed under

Comments