
Commit 00ffe76

Consistant hash & LRU Cache
1 parent 54b5546 commit 00ffe76

6 files changed: +336 -0 lines changed

Advanced.Algorithms.Tests/Advanced.Algorithms.Tests.csproj

Lines changed: 2 additions & 0 deletions
@@ -125,6 +125,8 @@
     <Compile Include="DataStructures\Tree\TestHelpers\BinarySearchTreeTester.cs" />
     <Compile Include="DataStructures\Tree\Tree_Tests.cs" />
     <Compile Include="DataStructures\Tree\BinaryTree_Tests.cs" />
+    <Compile Include="DistributedSystems\ConsistentHash_Tests.cs" />
+    <Compile Include="DistributedSystems\LRUCache_Tests.cs" />
     <Compile Include="Geometry\PointRotation_Tests.cs" />
     <Compile Include="GraphAlgorithms\ShortestPath\TravellingSalesman_Tests.cs" />
     <Compile Include="Miscellaneous\MatrixMultiplication_Tests.cs" />
Advanced.Algorithms.Tests/DistributedSystems/ConsistentHash_Tests.cs

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
using Advanced.Algorithms.DistributedSystems;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;

namespace Advanced.Algorithms.Tests.DistributedSystems
{
    [TestClass]
    public class ConsistentHash_Tests
    {
        [TestMethod]
        public void ConsistantHash_Smoke_Test()
        {
            var hash = new ConsistentHash<int>();

            hash.AddNode(15);
            hash.AddNode(25);
            hash.AddNode(172);

            for (int i = 200; i < 300; i++)
            {
                hash.AddNode(i);
            }

            hash.RemoveNode(15);
            hash.RemoveNode(172);
            hash.RemoveNode(25);

            var rnd = new Random();
            for (int i = 0; i < 1000; i++)
            {
                Assert.AreNotEqual(15, hash.GetNode(rnd.Next().ToString()));
                Assert.AreNotEqual(25, hash.GetNode(rnd.Next().ToString()));
                Assert.AreNotEqual(172, hash.GetNode(rnd.Next().ToString()));

                var t = hash.GetNode(rnd.Next().ToString());
                Assert.IsTrue(t >= 200 && t < 300);
            }
        }
    }
}
Advanced.Algorithms.Tests/DistributedSystems/LRUCache_Tests.cs

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
using Advanced.Algorithms.DistributedSystems;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;

namespace Advanced.Algorithms.Tests.DistributedSystems
{
    [TestClass]
    public class LRUCache_Tests
    {
        [TestMethod]
        public void LRUCache_Smoke_Test()
        {
            var cache = new LRUCache<int, int>(2);

            cache.Put(1, 1);
            cache.Put(2, 2);
            Assert.AreEqual(1, cache.Get(1));

            //inserting a third key evicts the least recently used key (2);
            //Get on a missing key returns default(int), i.e. 0
            cache.Put(3, 3);
            Assert.AreEqual(0, cache.Get(2));

            cache.Put(4, 4);
            Assert.AreEqual(0, cache.Get(1));
            Assert.AreEqual(3, cache.Get(3));
            Assert.AreEqual(4, cache.Get(4));
        }
    }
}
Advanced.Algorithms/DistributedSystems/ConsistantHash.cs

Lines changed: 192 additions & 0 deletions
@@ -0,0 +1,192 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;

namespace Advanced.Algorithms.DistributedSystems
{
    /// <summary>
    /// A consistent hash implementation with MurmurHash.
    /// Adapted from https://github.com/wsq003/consistent-hash/blob/master/ConsistentHash.cs
    /// </summary>
    /// <typeparam name="T"></typeparam>
    public class ConsistentHash<T>
    {
        SortedDictionary<int, T> circle = new SortedDictionary<int, T>();
        int[] circleKeys;
        int replicas;

        public ConsistentHash()
            : this(new List<T>(), 100) { }

        public ConsistentHash(IEnumerable<T> nodes, int replicas)
        {
            this.replicas = replicas;
            foreach (T node in nodes)
            {
                AddNode(node);
            }
        }

        /// <summary>
        /// Add a new bucket.
        /// </summary>
        /// <param name="node"></param>
        public void AddNode(T node)
        {
            //place 'replicas' virtual points for this node on the circle
            for (int i = 0; i < replicas; i++)
            {
                int hash = getHashCode(node.GetHashCode().ToString() + i);
                circle[hash] = node;
            }

            circleKeys = circle.Keys.ToArray();
        }

        /// <summary>
        /// Get the bucket for the given key.
        /// </summary>
        /// <param name="key"></param>
        /// <returns></returns>
        public T GetNode(string key)
        {
            int hash = getHashCode(key);
            int first = Next_ClockWise(circleKeys, hash);
            return circle[circleKeys[first]];
        }

        /// <summary>
        /// Remove a bucket from lookup.
        /// </summary>
        /// <param name="node"></param>
        public void RemoveNode(T node)
        {
            for (int i = 0; i < replicas; i++)
            {
                int hash = getHashCode(node.GetHashCode().ToString() + i);
                if (!circle.Remove(hash))
                {
                    throw new Exception("Cannot remove a node that was never added.");
                }
            }

            circleKeys = circle.Keys.ToArray();
        }

        /// <summary>
        /// Move clockwise until we find a bucket with key >= hashCode.
        /// </summary>
        /// <param name="keys"></param>
        /// <param name="hashCode"></param>
        /// <returns>Returns the index of the bucket.</returns>
        int Next_ClockWise(int[] keys, int hashCode)
        {
            int begin = 0;
            int end = keys.Length - 1;

            //wrap around to the first bucket when hashCode falls outside the key range
            if (keys[end] < hashCode || keys[0] > hashCode)
            {
                return 0;
            }

            //do a binary search for the first key >= hashCode
            int mid = begin;
            while (end - begin > 1)
            {
                mid = (end + begin) / 2;
                if (keys[mid] >= hashCode)
                {
                    end = mid;
                }
                else
                {
                    begin = mid;
                }
            }

            return end;
        }

        private static int getHashCode(string key)
        {
            return (int)MurmurHash2.Hash(Encoding.Unicode.GetBytes(key));
        }
    }

    internal class MurmurHash2
    {
        internal static UInt32 Hash(Byte[] data)
        {
            return Hash(data, 0xc58f1a7b);
        }

        const UInt32 m = 0x5bd1e995;
        const Int32 r = 24;

        [StructLayout(LayoutKind.Explicit)]
        struct BytetoUInt32Converter
        {
            [FieldOffset(0)]
            public Byte[] Bytes;

            [FieldOffset(0)]
            public UInt32[] UInts;
        }

        internal static UInt32 Hash(Byte[] data, UInt32 seed)
        {
            Int32 length = data.Length;
            if (length == 0)
                return 0;

            UInt32 h = seed ^ (UInt32)length;
            Int32 currentIndex = 0;

            // array will be length of Bytes but contains UInts
            // therefore currentIndex will jump with +1 while length will jump with +4
            UInt32[] hackArray = new BytetoUInt32Converter { Bytes = data }.UInts;
            while (length >= 4)
            {
                UInt32 k = hackArray[currentIndex++];
                k *= m;
                k ^= k >> r;
                k *= m;

                h *= m;
                h ^= k;
                length -= 4;
            }

            currentIndex *= 4; // fix the length
            switch (length)
            {
                case 3:
                    h ^= (UInt16)(data[currentIndex++] | data[currentIndex++] << 8);
                    h ^= (UInt32)data[currentIndex] << 16;
                    h *= m;
                    break;
                case 2:
                    h ^= (UInt16)(data[currentIndex++] | data[currentIndex] << 8);
                    h *= m;
                    break;
                case 1:
                    h ^= data[currentIndex];
                    h *= m;
                    break;
                default:
                    break;
            }

            // Do a few final mixes of the hash to ensure the last few
            // bytes are well-incorporated.
            h ^= h >> 13;
            h *= m;
            h ^= h >> 15;

            return h;
        }
    }
}
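For orientation, here is a minimal usage sketch of the ConsistentHash<T> class above. It is not part of the commit; the demo class name, server names, key, and replica count are illustrative assumptions.

using System;
using Advanced.Algorithms.DistributedSystems;

class ConsistentHashDemo
{
    static void Main()
    {
        //hypothetical node names; 100 virtual replicas per node matches the class default
        var ring = new ConsistentHash<string>(new[] { "server-a", "server-b", "server-c" }, 100);

        //the same key always maps to the same node while the ring is unchanged
        Console.WriteLine(ring.GetNode("user:42"));

        //removing a node only remaps the keys that pointed to it;
        //keys owned by the remaining nodes keep their placement
        ring.RemoveNode("server-b");
        Console.WriteLine(ring.GetNode("user:42"));
    }
}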
Advanced.Algorithms/DistributedSystems/LRUCache.cs

Lines changed: 67 additions & 0 deletions
@@ -0,0 +1,67 @@
using Advanced.Algorithms.DataStructures;
using System;
using System.Collections.Generic;
using System.Linq;

namespace Advanced.Algorithms.DistributedSystems
{
    public class LRUCache<K, V>
    {
        private int capacity;

        private Dictionary<K, DoublyLinkedListNode<Tuple<K, V>>> lookUp
            = new Dictionary<K, DoublyLinkedListNode<Tuple<K, V>>>();

        private DoublyLinkedList<Tuple<K, V>> dll = new DoublyLinkedList<Tuple<K, V>>();

        public LRUCache(int capacity)
        {
            if (capacity <= 0)
            {
                throw new Exception("Capacity must be a positive integer.");
            }
            this.capacity = capacity;
        }

        /// <summary>
        /// O(1) time complexity.
        /// </summary>
        /// <param name="key"></param>
        /// <returns></returns>
        public V Get(K key)
        {
            if (!lookUp.ContainsKey(key))
                return default(V);

            var node = lookUp[key];

            //move the recently used node to the beginning of the dll
            dll.Delete(node);
            var newNode = dll.InsertFirst(node.Data);
            lookUp[key] = newNode;

            return node.Data.Item2;
        }

        /// <summary>
        /// O(1) time complexity.
        /// </summary>
        /// <param name="key"></param>
        /// <param name="value"></param>
        public void Put(K key, V value)
        {
            //evict the last (least recently used) node of the dll when capacity is reached
            if (lookUp.Count == capacity)
            {
                var nodeToEvict = dll.Last();
                lookUp.Remove(nodeToEvict.Item1);
                dll.DeleteLast();
            }

            //insert as the most recently used entry
            var newNode = dll.InsertFirst(new Tuple<K, V>(key, value));
            lookUp.Add(key, newNode);
        }
    }
}
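To make the O(1) claims above concrete, here is a minimal sketch of the same dictionary-plus-doubly-linked-list pattern written against the BCL LinkedList<T>. It is an illustration only, not the committed implementation: the class name SimpleLruCache is hypothetical, and the commit uses the project's own DoublyLinkedList rather than LinkedList<T>.

using System;
using System.Collections.Generic;

//the dictionary gives O(1) key lookup; the linked list keeps entries in recency order,
//with the most recently used at the head and the eviction candidate at the tail
public class SimpleLruCache<K, V>
{
    private readonly int capacity;
    private readonly Dictionary<K, LinkedListNode<Tuple<K, V>>> lookUp
        = new Dictionary<K, LinkedListNode<Tuple<K, V>>>();
    private readonly LinkedList<Tuple<K, V>> list = new LinkedList<Tuple<K, V>>();

    public SimpleLruCache(int capacity)
    {
        this.capacity = capacity;
    }

    public V Get(K key)
    {
        LinkedListNode<Tuple<K, V>> node;
        if (!lookUp.TryGetValue(key, out node))
            return default(V);

        //O(1): unlink the node and relink it at the head
        list.Remove(node);
        list.AddFirst(node);
        return node.Value.Item2;
    }

    public void Put(K key, V value)
    {
        if (lookUp.Count == capacity)
        {
            //O(1): the tail is the least recently used entry
            var last = list.Last;
            lookUp.Remove(last.Value.Item1);
            list.RemoveLast();
        }

        lookUp.Add(key, list.AddFirst(new Tuple<K, V>(key, value)));
    }
}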

README.md

Lines changed: 2 additions & 0 deletions
@@ -244,6 +244,8 @@ Note: On a decent desktop, in given implementations here for +ive random input i
 ## Distributed Systems

 - [X] Circular Queue (Ring Buffer) ([Implementation](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms/DistributedSystems/CircularQueue.cs) | [Tests](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms.Tests/DistributedSystems/CircularQueue_Tests.cs))
+- [X] Consistent Hash ([Implementation](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms/DistributedSystems/ConsistantHash.cs) | [Tests](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms.Tests/DistributedSystems/ConsistantHash_Tests.cs))
+- [X] LRU Cache ([Implementation](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms/DistributedSystems/LRUCache.cs) | [Tests](https://github.com/justcoding121/Advanced-Algorithms/tree/develop/Advanced.Algorithms.Tests/DistributedSystems/LRUCache_Tests.cs))

 ## Numerical Methods
