SCache.h
#pragma once
#include <unordered_map>
#include <memory>
#include <string>
#include <utility>
#include <mutex>
#include <shared_mutex>
#include <atomic>
#include <type_traits>
#include <common.h>
#include <moodycamel/blockingconcurrentqueue.h>
#include <GUtil.h>
#include <CachePersister.h>
// Bit flags controlling how a cache is persisted, published and loaded.
enum CacheOptions
{
    DOPERSIST = 1,  // write entries to the persistence stream
    DOPUBLISH = 2,  // publish entries on the stream
    LOADNOW   = 4,  // load existing entries when persistence is configured
    DOASYNC   = 8   // route persistence through CachePersister::persistMessage() instead of doing it inline
};
// Thread-safe key/value cache with optional persistence and publishing
// through CachePersister. Reads take a shared lock, writes an exclusive one.
template<typename Key, typename Type>
class SCache
{
private:
    std::unordered_map<Key, Type> _map;
    std::unique_ptr<CachePersister> _persister;
    int _cacheOptions = 0;
    std::shared_mutex _sharedMutex;
    std::mutex _persistMutex;
public:
    SCache() = default;
    // Copying and moving are disabled: the mutex members are not copyable or
    // movable, so the defaulted copy operations would be deleted anyway.
    // Use copyFrom() to copy entries between caches.
    SCache(const SCache&) = delete;
    SCache(SCache&&) = delete;
    SCache& operator=(const SCache&) = delete;
    EventHandler<Type> ItemChanged;
    typedef typename std::unordered_map<Key, Type>::iterator iterator;
    typedef typename std::unordered_map<Key, Type>::const_iterator const_iterator;
    // Iteration and size queries access the map without locking.
    iterator begin() { return _map.begin(); }
    const_iterator begin() const { return _map.begin(); }
    iterator end() { return _map.end(); }
    const_iterator end() const { return _map.end(); }
    bool empty() const { return _map.empty(); }
    size_t size() const { return _map.size(); }
    // Non thread-safe insertion (no locking); use carefully.
    template <class... Args>
    std::pair<iterator, bool> emplace(Args&&... args)
    {
        return _map.emplace(std::forward<Args>(args)...);
    }
    // Thread-safe accessors.
    // Copies the value for `key` into `value`; returns false if the key is absent.
    bool GetByKey(const Key& key, Type& value)
    {
        std::shared_lock<std::shared_mutex> lock(_sharedMutex);
        auto iter = _map.find(key);
        if (iter == _map.end())
            return false;
        value = iter->second;
        return true;
    }
    // Returns a reference to the entry, default-constructing it if missing.
    // Note: the lock is released on return, so the returned reference itself
    // is not protected against concurrent modification.
    Type& GetByKey(const Key& key)
    {
        std::unique_lock<std::shared_mutex> lock(_sharedMutex);
        return _map[key];
    }
    // Stores the value, optionally raises ItemChanged and persists/publishes it.
    void PutByKey(Key& key, Type& value, bool raiseEvent = true, bool persistNow = true)
    {
        {
            std::unique_lock<std::shared_mutex> lock(_sharedMutex);
            _map[key] = value;
        }
        if (raiseEvent)
            ItemChanged.raise(value);
        if (!_persister)
            return;
        if (!persistNow)
            return;
        if (_cacheOptions & CacheOptions::DOASYNC)
        {
            // Asynchronous path: delegate persistence/publishing to CachePersister.
            _persister->persistMessage(key, value);
        }
        else
        {
            // Persist and publish on the calling thread (slow).
            Json vj;
            std::string vs;
            {
                dump(get_ref<Type>{}(value), vj);
                vs = vj.dump();
            }
            if (_persister->shouldPersist())
                GUtil::instance().set(_persister->getPersistStream(), get_value<Key>{}(key), vs);
            if (_persister->shouldPublish())
                GUtil::instance().publish(_persister->getPersistStream(), vs);
        }
    }
    void ClearAll()
    {
        std::unique_lock<std::shared_mutex> lock(_sharedMutex);
        _map.clear();
    }
    // Configures persistence/publishing for the given stream and, when LOADNOW
    // is set, loads the existing entries from it.
    void setPersistence(const std::string& stream, int cacheOptions = (CacheOptions::LOADNOW | CacheOptions::DOASYNC | CacheOptions::DOPERSIST))
    {
        _persister.reset(new CachePersister(stream,
                                            (cacheOptions & CacheOptions::DOPERSIST) != 0,
                                            (cacheOptions & CacheOptions::DOPUBLISH) != 0));
        if (cacheOptions & CacheOptions::LOADNOW)
        {
            loadFromCache();
        }
        _cacheOptions = cacheOptions;
    }
    // Copies all entries from another cache; the copies are neither persisted nor published.
    void copyFrom(const SCache<Key, Type>& cache)
    {
        std::unique_lock<std::shared_mutex> lock(_sharedMutex);
        for (auto& elem : cache)
        {
            _map[elem.first] = elem.second;
        }
    }
private:
    // Replays every persisted record from the stream into the in-memory map.
    void loadFromCache()
    {
        int count = 0;
        _persister->loadMsg([&](const std::string& key, const std::string& val) {
            Type obj;
            load(get_Obj<Type>{}(obj), val);
            _map[get_actual<Key>{}(key)] = obj;
            ++count;
        });
        info("Loaded {0} records from cache {1}", count, _persister->getPersistStream());
    }
};
// Convenience alias for string-keyed caches.
template <typename Type>
using SSCache = SCache<std::string, Type>;
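/*
 Usage sketch (illustrative only). `Order` and the stream name "orders" are
 hypothetical; `Order` is assumed to satisfy whatever serialization hooks
 (dump/load/get_ref/get_Obj from common.h) the persistence path requires, and
 the CachePersister/GUtil/EventHandler behaviour is assumed from the
 declarations above rather than verified.

     struct Order { int id = 0; double qty = 0; };

     SSCache<Order> cache;
     cache.setPersistence("orders");      // default: DOPERSIST | DOASYNC | LOADNOW
     std::string key = "order:42";
     Order order;
     cache.PutByKey(key, order);          // stores, raises ItemChanged, persists via CachePersister
     Order found;
     if (cache.GetByKey(key, found))
     {
         // `found` is a copy taken under the shared lock
     }
*/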