做个LRU(Least Recently Used,最近最少使用)缓存,算法挺简单的。。。
而且好像用处也挺广的(?),用的比较广的一个cache算法
比如我cache只有4这么大,现在有很多元素1,2,2,4,2,5,3
cache income:1
1
cache income:2
2 1
cache income:1
1 2
cache income:4
4 1 2
cache income:2
2 4 1
cache income:5
5 2 4 1
cache income:3
3 5 2 4
大概就这么个样子。。。
看出来了吧,是按元素最近一次被使用的时间排序(注意是"最近使用时间",不是使用频率),最后使用的放最前面
如果cache不满,新来的放第一个,如果满了,在cache里面就把里面那个放到第一个,如果不在就删除最后一个,然后把新元素放第一个。
ok,算法就说完了。。
talk is cheap , show me the code...
经常看到各种经典算法,感觉都很简单啊。。。
当然这个确实也简单
就是用一个双向链表+map
不用map查找的话就要遍历了。。。时间复杂度就上升了
双向链表的好处就是。。。用map定位到那个节点,然后很方便的移动或者删除啊什么的,单向就做不到啦,因为你要删除还要找prev
双向链表就不写了,用stl的list代替
#include <list>
#include <unordered_map>

// One cache entry. The key is stored alongside the value so that when the
// least-recently-used node is evicted from the back of the list we can also
// erase its entry from the lookup map.
struct CacheNode {
    int key;
    int value;
    CacheNode(int k, int v) : key(k), value(v) {}
};

// LRU cache backed by a doubly linked list (recency order, most recent at the
// front) plus a hash map from key to list iterator for O(1) lookup.
class LRUCache {
public:
    // capacity: maximum number of entries; a capacity of 0 stores nothing.
    explicit LRUCache(int capacity) : size(capacity) {}

    // Returns the value for `key`, or -1 if absent.
    // A hit moves the entry to the front (most recently used).
    int get(int key) {
        auto it = cacheMap.find(key);   // single lookup (was find + operator[])
        if (it == cacheMap.end()) {
            return -1;
        }
        // splice moves the node to the front without invalidating any list
        // iterator, so the iterator stored in the map stays valid — no need
        // to re-assign cacheMap[key] afterwards.
        cacheList.splice(cacheList.begin(), cacheList, it->second);
        return it->second->value;
    }

    // Inserts or updates `key`; the entry becomes the most recently used.
    // Evicts the least recently used entry when the cache is full.
    void set(int key, int value) {
        auto it = cacheMap.find(key);
        if (it != cacheMap.end()) {
            // Already cached: refresh recency and overwrite the value.
            cacheList.splice(cacheList.begin(), cacheList, it->second);
            it->second->value = value;
            return;
        }
        // BUGFIX: with capacity 0 the original called back()/pop_back() on an
        // empty list (undefined behavior). A zero-capacity cache stores nothing.
        if (size <= 0) {
            return;
        }
        if (static_cast<int>(cacheList.size()) == size) {
            cacheMap.erase(cacheList.back().key);  // drop LRU entry from map...
            cacheList.pop_back();                  // ...then from the list
        }
        cacheList.push_front(CacheNode(key, value));
        cacheMap[key] = cacheList.begin();
    }

private:
    int size;                        // capacity
    std::list<CacheNode> cacheList;  // front = most recently used
    std::unordered_map<int, std::list<CacheNode>::iterator> cacheMap;
};
===update 13/07/2014
重新自己用双向链表实现了一次,虽然原理很简单,但是一些细节总是弄错T_T
所以debug了1个小时,真是伤心。。。
#include <iostream>
#include <unordered_map>
using namespace std;

// Node of the intrusive doubly linked list that records recency order.
struct CacheNode {
    int key;
    int value;
    CacheNode* next;
    CacheNode* prev;
    CacheNode(int _key, int _value)
        : key(_key), value(_value), next(nullptr), prev(nullptr) {}
};

// Hand-rolled LRU cache: a doubly linked list with TWO sentinel nodes (head
// and tail), so linking/unlinking never needs null checks, plus a hash map
// from key to node for O(1) lookup. head->next is the most recently used
// node, tail->prev the least recently used.
//
// BUGFIXES vs. the original single-sentinel version:
//  - the raw `tail` pointer was left pointing at the head sentinel after an
//    eviction, so two consecutive evicting set() calls deleted the sentinel
//    itself (undefined behavior); sentinels on both ends remove the need to
//    track the last node at all;
//  - no destructor existed, so every node (and the sentinel) leaked;
//  - INT_MIN was used without including <climits>; no sentinel key is
//    needed anymore.
class LRUCache {
public:
    LRUCache(int capacity) {
        _capacity = capacity;
        head = new CacheNode(0, 0);  // sentinel before the most-recent node
        tail = new CacheNode(0, 0);  // sentinel after the least-recent node
        head->next = tail;
        tail->prev = head;
        len = 0;
    }

    // Release every node, including both sentinels.
    ~LRUCache() {
        CacheNode* cur = head;
        while (cur) {
            CacheNode* nxt = cur->next;
            delete cur;
            cur = nxt;
        }
    }

    // Copying would double-delete the list; forbid it (rule of three).
    LRUCache(const LRUCache&) = delete;
    LRUCache& operator=(const LRUCache&) = delete;

    // Returns the value for `key` (moving it to the front), or -1 if absent.
    int get(int key) {
        auto found = cache.find(key);
        if (found == cache.end()) {
            return -1;
        }
        moveToHead(found->second);
        return found->second->value;
    }

    // Inserts or updates `key`; evicts the least recently used entry (the
    // node just before the tail sentinel) when the cache is full.
    void set(int key, int value) {
        auto found = cache.find(key);
        if (found != cache.end()) {
            found->second->value = value;
            moveToHead(found->second);
            return;
        }
        if (_capacity <= 0) {
            return;  // a zero-capacity cache can never store anything
        }
        if (len == _capacity) {
            CacheNode* lru = tail->prev;  // least recently used real node
            unlink(lru);
            cache.erase(lru->key);
            delete lru;                   // original leaked evicted nodes' memory on shutdown
            len--;
        }
        CacheNode* node = new CacheNode(key, value);
        insertAfterHead(node);
        cache[key] = node;
        len++;
    }

private:
    CacheNode* head;  // sentinel; head->next = most recently used
    CacheNode* tail;  // sentinel; tail->prev = least recently used
    int _capacity;
    int len;
    unordered_map<int, CacheNode*> cache;

    // Detach `node` from the list. `node` must currently be linked, which
    // the sentinels guarantee for every real node.
    void unlink(CacheNode* node) {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }

    // Link `node` directly after the head sentinel (most recent position).
    void insertAfterHead(CacheNode* node) {
        node->next = head->next;
        node->prev = head;
        head->next->prev = node;
        head->next = node;
    }

    // Move an already-linked node to the front (mark as most recently used).
    void moveToHead(CacheNode* node) {
        unlink(node);
        insertAfterHead(node);
    }
};

int main() {
    LRUCache lru(1);
    lru.set(2, 1);
    cout << lru.get(2) << endl;
    lru.set(3, 2);
    cout << lru.get(2) << endl;
    cout << lru.get(3) << endl;
}