做個LRU,算法挺簡單的。。。
而且好像用處也挺廣的(?),用的比較廣的一個cache算法
比如我cache只有4這么大,現在依次進來這些元素:1,2,1,4,2,5,3
cache income:1
1
cache income:2
2 1
cache income:1
1 2
cache income:4
4 1 2
cache income:2
2 4 1
cache income:5
5 2 4 1
cache income:3
3 5 2 4
大概就這么個樣子。。。
看出來了吧,是按元素最近一次被使用的時間排序,最後使用的放最前面
如果新元素已經在cache裡,就把它移到第一個;如果不在:cache不滿時直接放到第一個,滿了就先刪除最後一個(最久沒用的),再把新元素放到第一個。
ok,算法就說完了。。
talk is cheap , show me the code...
經常看到各種經典算法,感覺都很簡單啊。。。
當然這個確實也簡單
就是用一個雙向鏈表+map
不用map查找的話就要遍歷了。。。時間復雜度就上升了
雙向鏈表的好處就是。。。用map定位到那個節點,然后很方便的移動或者刪除啊什么的,單向就做不到啦,因為你要刪除還要找prev
雙向鏈表就不寫了,用stl的list代替
#include <cstddef>
#include <list>
#include <unordered_map>

// One cache entry: the key/value pair stored inside the recency-ordered list.
struct CacheNode {
    int key;
    int value;
    CacheNode(int k, int v) : key(k), value(v) {}
};

// LRU cache: a doubly linked list (std::list, most-recently-used at the
// front) plus a hash map from key to list iterator for O(1) lookup.
// std::list::splice relinks nodes without invalidating iterators, so the
// iterators stored in the map remain valid after every reordering — no
// re-insertion into the map is needed after a move-to-front.
class LRUCache {
public:
    // A non-positive capacity yields a cache that stores nothing
    // (the original popped from an empty list in that case — UB).
    explicit LRUCache(int capacity) : size(capacity > 0 ? capacity : 0) {}

    // Returns the value for `key` and marks it most recently used,
    // or -1 if the key is absent.
    int get(int key) {
        auto it = cacheMap.find(key);
        if (it == cacheMap.end()) return -1;
        // Move the hit node to the front; it->second stays valid.
        cacheList.splice(cacheList.begin(), cacheList, it->second);
        return it->second->value;
    }

    // Inserts or updates `key`, evicting the least recently used entry
    // (the back of the list) when the cache is full.
    void set(int key, int value) {
        auto it = cacheMap.find(key);
        if (it != cacheMap.end()) {
            // Existing key: refresh recency and overwrite the value.
            cacheList.splice(cacheList.begin(), cacheList, it->second);
            it->second->value = value;
            return;
        }
        if (size == 0) return;  // degenerate cache: nothing to store
        if (cacheList.size() == size) {
            // Evict the LRU victim at the back of the list.
            cacheMap.erase(cacheList.back().key);
            cacheList.pop_back();
        }
        cacheList.emplace_front(key, value);
        cacheMap[key] = cacheList.begin();
    }

private:
    std::size_t size;                 // maximum number of entries
    std::list<CacheNode> cacheList;   // MRU at front, LRU at back
    std::unordered_map<int, std::list<CacheNode>::iterator> cacheMap;
};
===update 13/07/2014
重新自己用雙向鏈表實現了一次,雖然原理很簡單,但是一些細節總是弄錯T_T
所以debug了1個小時,真是傷心。。。
#include <iostream>
#include <unordered_map>
using namespace std;

// Node of the hand-rolled doubly linked list: key/value plus both links.
struct CacheNode {
    int key;
    int value;
    CacheNode* next;
    CacheNode* prev;
    CacheNode(int _key, int _value)
        : key(_key), value(_value), next(nullptr), prev(nullptr) {}
};

// LRU cache over a hand-written doubly linked list.
//
// Fix vs. the first draft: that version tracked the real last node in a
// bare `tail` pointer and patched it after the fact in get()/set(); with
// capacity == 1, two consecutive evicting set() calls left `tail` pointing
// at the head sentinel, so the next eviction erased the sentinel's key
// from the map (leaving a stale entry) and deleted the sentinel itself —
// undefined behavior. Using TWO sentinel nodes (dummy head and dummy tail)
// removes every special case: the most recent node is always head->next
// and the eviction victim is always tail->prev, so no pointer bookkeeping
// can go stale. The original also leaked every node (no destructor).
class LRUCache {
public:
    LRUCache(int capacity) {
        _capacity = capacity;
        len = 0;
        head = new CacheNode(0, 0);  // dummy: sits before the MRU node
        tail = new CacheNode(0, 0);  // dummy: sits after the LRU node
        head->next = tail;
        tail->prev = head;
    }

    ~LRUCache() {
        // Free every node, sentinels included.
        CacheNode* cur = head;
        while (cur) {
            CacheNode* nxt = cur->next;
            delete cur;
            cur = nxt;
        }
    }

    // Copying would double-delete the list; forbid it (Rule of Five).
    LRUCache(const LRUCache&) = delete;
    LRUCache& operator=(const LRUCache&) = delete;

    // Return the value for `key` (marking it most recently used), -1 if absent.
    int get(int key) {
        auto found = cache.find(key);
        if (found == cache.end()) return -1;
        moveToFront(found->second);
        return found->second->value;
    }

    // Insert or update `key`; evict the least recently used node when full.
    void set(int key, int value) {
        auto found = cache.find(key);
        if (found != cache.end()) {     // already cached: update and refresh
            found->second->value = value;
            moveToFront(found->second);
            return;
        }
        if (_capacity <= 0) return;     // degenerate cache holds nothing
        if (len == _capacity) {
            // Evict the node just before the tail sentinel — always the LRU.
            CacheNode* victim = tail->prev;
            cache.erase(victim->key);
            unlink(victim);
            delete victim;
            len--;
        }
        CacheNode* node = new CacheNode(key, value);
        linkAfterHead(node);
        cache[key] = node;
        len++;
    }

private:
    CacheNode* head;  // sentinel before the most recently used node
    CacheNode* tail;  // sentinel after the least recently used node
    int _capacity;
    int len;          // number of real (non-sentinel) nodes
    unordered_map<int, CacheNode*> cache;

    // Detach `node`; sentinels guarantee both neighbours always exist.
    void unlink(CacheNode* node) {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }

    // Splice `node` in right after the head sentinel (MRU position).
    void linkAfterHead(CacheNode* node) {
        node->prev = head;
        node->next = head->next;
        head->next->prev = node;
        head->next = node;
    }

    void moveToFront(CacheNode* node) {
        unlink(node);
        linkAfterHead(node);
    }
};

int main() {
    LRUCache lru(1);
    lru.set(2, 1);
    cout << lru.get(2) << endl;  // 1
    lru.set(3, 2);               // evicts key 2
    cout << lru.get(2) << endl;  // -1
    cout << lru.get(3) << endl;  // 2
}