diff --git a/optimizely/odp/__init__.py b/optimizely/odp/__init__.py
new file mode 100644
index 00000000..cd898c0e
--- /dev/null
+++ b/optimizely/odp/__init__.py
@@ -0,0 +1,12 @@
+# Copyright 2022, Optimizely
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/optimizely/odp/lru_cache.py b/optimizely/odp/lru_cache.py
new file mode 100644
index 00000000..e7fc32af
--- /dev/null
+++ b/optimizely/odp/lru_cache.py
@@ -0,0 +1,120 @@
+# Copyright 2022, Optimizely
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from dataclasses import dataclass, field
+import threading
+from time import time
+from collections import OrderedDict
+from typing import Optional, Generic, TypeVar, Hashable
+from sys import version_info
+
+if version_info < (3, 8):
+    from typing_extensions import Protocol
+else:
+    from typing import Protocol  # type: ignore
+
+# generic type definitions for LRUCache parameters
+K = TypeVar('K', bound=Hashable, contravariant=True)
+V = TypeVar('V')
+
+
+class LRUCache(Generic[K, V]):
+    """Least Recently Used cache that invalidates entries older than the timeout."""
+
+    def __init__(self, capacity: int, timeout_in_secs: int):
+        self.lock = threading.Lock()
+        self.map: OrderedDict[K, CacheElement[V]] = OrderedDict()
+        self.capacity = capacity
+        self.timeout = timeout_in_secs
+
+    def lookup(self, key: K) -> Optional[V]:
+        """Return the non-stale value associated with the provided key and move the
+        element to the most recent end of the cache. If the value is stale, remove it
+        from the cache and return None.
+        """
+        if self.capacity <= 0:
+            return None
+
+        with self.lock:
+            if key not in self.map:
+                return None
+
+            self.map.move_to_end(key)
+            element = self.map[key]
+
+            if element._is_stale(self.timeout):
+                del self.map[key]
+                return None
+
+        return element.value
+
+    def save(self, key: K, value: V) -> None:
+        """Insert and/or move the provided key/value pair to the most recent end of the cache.
+        If the cache grows beyond the cache capacity, the least recently used element will be
+        removed.
+        """
+        if self.capacity <= 0:
+            return
+
+        with self.lock:
+            if key in self.map:
+                self.map.move_to_end(key)
+
+            self.map[key] = CacheElement(value)
+
+            if len(self.map) > self.capacity:
+                self.map.popitem(last=False)
+
+    def reset(self) -> None:
+        """Clear the cache."""
+        if self.capacity <= 0:
+            return
+        with self.lock:
+            self.map.clear()
+
+    def peek(self, key: K) -> Optional[V]:
+        """Return the value for the provided key without updating recency or checking staleness."""
+        if self.capacity <= 0:
+            return None
+        with self.lock:
+            element = self.map.get(key)
+        return element.value if element is not None else None
+
+
+@dataclass
+class CacheElement(Generic[V]):
+    """Individual element for the LRUCache."""
+    value: V
+    timestamp: float = field(default_factory=time)
+
+    def _is_stale(self, timeout: float) -> bool:
+        """Returns True if the provided timeout has passed since the element's timestamp."""
+        if timeout <= 0:
+            return False
+        return time() - self.timestamp >= timeout
+
+
+class OptimizelySegmentsCache(Protocol):
+    """Protocol for implementing custom cache."""
+    def reset(self) -> None:
+        """Clear the cache."""
+        ...
+
+    def lookup(self, key: str) -> Optional[list[str]]:
+        """Return the value associated with the provided key."""
+        ...
+
+    def save(self, key: str, value: list[str]) -> None:
+        """Save the key/value pair in the cache."""
+        ...
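For reference, a minimal usage sketch of the `LRUCache` added above (not part of the diff). The key, values, capacity, and timeout are illustrative; the `str -> list[str]` shape mirrors the `OptimizelySegmentsCache` protocol.

```python
from optimizely.odp.lru_cache import LRUCache

# illustrative settings: hold up to 100 entries, treat entries older than 600s as stale
cache = LRUCache(capacity=100, timeout_in_secs=600)

# example key/value pair; real ODP keys and segment names will differ
cache.save('user-123', ['segment-a', 'segment-b'])

# lookup() refreshes recency and evicts the entry if it is older than the timeout
print(cache.lookup('user-123'))   # ['segment-a', 'segment-b']

# peek() reads the value without updating recency or checking staleness
print(cache.peek('user-123'))     # ['segment-a', 'segment-b']

cache.reset()                     # drop all entries
print(cache.lookup('user-123'))   # None
```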
diff --git a/tests/test_lru_cache.py b/tests/test_lru_cache.py
new file mode 100644
index 00000000..acaf07cc
--- /dev/null
+++ b/tests/test_lru_cache.py
@@ -0,0 +1,135 @@
+# Copyright 2022, Optimizely
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import time
+from unittest import TestCase
+from optimizely.odp.lru_cache import LRUCache, OptimizelySegmentsCache
+
+
+class LRUCacheTest(TestCase):
+    def test_min_config(self):
+        cache = LRUCache(1000, 2000)
+        self.assertEqual(1000, cache.capacity)
+        self.assertEqual(2000, cache.timeout)
+
+        cache = LRUCache(0, 0)
+        self.assertEqual(0, cache.capacity)
+        self.assertEqual(0, cache.timeout)
+
+    def test_save_and_lookup(self):
+        max_size = 2
+        cache = LRUCache(max_size, 1000)
+
+        self.assertIsNone(cache.peek(1))
+        cache.save(1, 100)                       # [1]
+        cache.save(2, 200)                       # [1, 2]
+        cache.save(3, 300)                       # [2, 3]
+        self.assertIsNone(cache.peek(1))
+        self.assertEqual(200, cache.peek(2))
+        self.assertEqual(300, cache.peek(3))
+
+        cache.save(2, 201)                       # [3, 2]
+        cache.save(1, 101)                       # [2, 1]
+        self.assertEqual(101, cache.peek(1))
+        self.assertEqual(201, cache.peek(2))
+        self.assertIsNone(cache.peek(3))
+
+        self.assertIsNone(cache.lookup(3))       # [2, 1]
+        self.assertEqual(201, cache.lookup(2))   # [1, 2]
+        cache.save(3, 302)                       # [2, 3]
+        self.assertIsNone(cache.peek(1))
+        self.assertEqual(201, cache.peek(2))
+        self.assertEqual(302, cache.peek(3))
+
+        self.assertEqual(302, cache.lookup(3))   # [2, 3]
+        cache.save(1, 103)                       # [3, 1]
+        self.assertEqual(103, cache.peek(1))
+        self.assertIsNone(cache.peek(2))
+        self.assertEqual(302, cache.peek(3))
+
+        self.assertEqual(len(cache.map), max_size)
+        self.assertEqual(len(cache.map), cache.capacity)
+
+    def test_size_zero(self):
+        cache = LRUCache(0, 1000)
+
+        self.assertIsNone(cache.lookup(1))
+        cache.save(1, 100)                       # [1]
+        self.assertIsNone(cache.lookup(1))
+
+    def test_size_less_than_zero(self):
+        cache = LRUCache(-2, 1000)
+
+        self.assertIsNone(cache.lookup(1))
+        cache.save(1, 100)                       # [1]
+        self.assertIsNone(cache.lookup(1))
+
+    def test_timeout(self):
+        max_timeout = .5
+
+        cache = LRUCache(1000, max_timeout)
+
+        cache.save(1, 100)                       # [1]
+        cache.save(2, 200)                       # [1, 2]
+        cache.save(3, 300)                       # [1, 2, 3]
+        time.sleep(1.1)  # wait to expire
+        cache.save(4, 400)                       # [1, 2, 3, 4]
+        cache.save(1, 101)                       # [2, 3, 4, 1]
+
+        self.assertEqual(101, cache.lookup(1))   # [4, 1]
+        self.assertIsNone(cache.lookup(2))
+        self.assertIsNone(cache.lookup(3))
+        self.assertEqual(400, cache.lookup(4))
+
+    def test_timeout_zero(self):
+        max_timeout = 0
+        cache = LRUCache(1000, max_timeout)
+
+        cache.save(1, 100)                       # [1]
+        cache.save(2, 200)                       # [1, 2]
+        time.sleep(1)  # wait to expire
+
+        self.assertEqual(100, cache.lookup(1), "should not expire when timeout is 0")
+        self.assertEqual(200, cache.lookup(2))
+
+    def test_timeout_less_than_zero(self):
+        max_timeout = -2
+        cache = LRUCache(1000, max_timeout)
+
+        cache.save(1, 100)                       # [1]
+        cache.save(2, 200)                       # [1, 2]
+        time.sleep(1)  # wait to expire
+
+        self.assertEqual(100, cache.lookup(1), "should not expire when timeout is less than 0")
+        self.assertEqual(200, cache.lookup(2))
+
+    def test_reset(self):
+        cache = LRUCache(1000, 600)
+        cache.save('wow', 'great')
+        cache.save('tow', 'freight')
+
+        self.assertEqual(cache.lookup('wow'), 'great')
+        self.assertEqual(len(cache.map), 2)
+
+        cache.reset()
+
+        self.assertEqual(cache.lookup('wow'), None)
+        self.assertEqual(len(cache.map), 0)
+
+        cache.save('cow', 'crate')
+        self.assertEqual(cache.lookup('cow'), 'crate')
+
+    # type checker test
+    # confirm that LRUCache matches OptimizelySegmentsCache protocol
+    _: OptimizelySegmentsCache = LRUCache(0, 0)
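As a companion to the protocol-conformance check above: because `OptimizelySegmentsCache` is a `Protocol`, any object exposing matching `reset`/`lookup`/`save` methods satisfies it structurally, with no inheritance required. A hedged sketch of a custom cache follows; `DictSegmentsCache` is a hypothetical name, not part of the SDK.

```python
from __future__ import annotations
from typing import Optional

from optimizely.odp.lru_cache import OptimizelySegmentsCache


class DictSegmentsCache:
    """Hypothetical dict-backed cache with no eviction and no TTL."""

    def __init__(self) -> None:
        self._store: dict[str, list[str]] = {}

    def reset(self) -> None:
        self._store.clear()

    def lookup(self, key: str) -> Optional[list[str]]:
        return self._store.get(key)

    def save(self, key: str, value: list[str]) -> None:
        self._store[key] = value


# structural typing: the Protocol is satisfied without subclassing
_custom: OptimizelySegmentsCache = DictSegmentsCache()
```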