diff --git a/djblets/cache/tests/test_backend.py b/djblets/cache/tests/test_backend.py
index 5e6f612bcda3b1042af66f4a1e82683498964574..8fe735be389c16188090ebd0cb07967962ed9f9b 100644
--- a/djblets/cache/tests/test_backend.py
+++ b/djblets/cache/tests/test_backend.py
@@ -2,8 +2,8 @@ import inspect
 import pickle
 import zlib
 
+import kgb
 from django.core.cache import cache
-from kgb import SpyAgency
 
 from djblets.cache.backend import (cache_memoize, cache_memoize_iter,
                                    make_cache_key,
@@ -11,130 +11,166 @@ from djblets.cache.backend import (cache_memoize, cache_memoize_iter,
 from djblets.testing.testcases import TestCase
 
 
-class CacheTests(SpyAgency, TestCase):
+class BaseCacheTestCase(kgb.SpyAgency, TestCase):
+    """Base class for cache-related unit tests."""
+
     def tearDown(self):
-        super(CacheTests, self).tearDown()
+        super().tearDown()
 
         cache.clear()
 
-    def test_cache_memoize(self):
-        """Testing cache_memoize"""
+    def build_test_chunk_data(self, num_chunks):
+        """Build enough test data to fill up the specified number of chunks.
+
+        This takes into account the size of the pickle data, and will
+        get us to exactly the specified number of chunks of data in the cache.
+
+        Args:
+            num_chunks (int):
+                The number of chunks to build.
+
+        Returns:
+            tuple:
+            A 2-tuple containing:
+
+            1. The data to cache.
+            2. The resulting pickled data.
+        """
+        data = 'x' * (CACHE_CHUNK_SIZE * num_chunks - 3 * num_chunks)
+        pickled_data = pickle.dumps(data, protocol=0)
+
+        self.assertTrue(pickled_data.startswith(b'Vxxxxxxxx'))
+        self.assertEqual(len(pickled_data), CACHE_CHUNK_SIZE * num_chunks)
+
+        return data, pickled_data
+
+
+class CacheMemoizeTests(BaseCacheTestCase):
+    """Unit tests for cache_memoize."""
+
+    def test_with_string(self):
+        """Testing cache_memoize with string"""
         cache_key = 'abc123'
         test_str = 'Test 123'
 
-        def cache_func(cache_called=[]):
-            self.assertTrue(not cache_called)
-            cache_called.append(True)
+        def cache_func():
             return test_str
 
+        self.spy_on(cache_func)
+
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, test_str)
+        self.assertSpyCallCount(cache_func, 1)
 
         # Call a second time. We should only call cache_func once.
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, test_str)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_with_unicode_data(self):
+    def test_with_unicode_data(self):
         """Testing cache_memoize with Unicode data"""
         cache_key = 'abc123'
         test_str = '🙃' * 5
 
-        def cache_func(cache_called=[]):
-            self.assertFalse(cache_called)
-            cache_called.append(True)
+        def cache_func():
             return test_str
 
+        self.spy_on(cache_func)
+
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, test_str)
+        self.assertSpyCallCount(cache_func, 1)
 
         # Call a second time. We should only call cache_func once.
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, test_str)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_with_non_sequence(self):
+    def test_with_non_sequence(self):
         """Testing cache_memoize with non-sequence type data"""
         cache_key = 'abc123'
         data = True
 
-        def cache_func(cache_called=[]):
-            self.assertFalse(cache_called)
-            cache_called.append(True)
+        def cache_func():
             return data
 
+        self.spy_on(cache_func)
+
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, data)
+        self.assertSpyCallCount(cache_func, 1)
 
         # Call a second time. We should only call cache_func once.
         result = cache_memoize(cache_key, cache_func)
         self.assertEqual(result, data)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_large_files_uncompressed(self):
+    def test_with_large_files_uncompressed(self):
         """Testing cache_memoize with large files without compression"""
         cache_key = 'abc123'
 
         # This takes into account the size of the pickle data, and will
         # get us to exactly 2 chunks of data in cache.
-        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
+        data, pickled_data = self.build_test_chunk_data(num_chunks=2)
 
         def cache_func():
             return data
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
         self.assertEqual(result, data)
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
         cache_key_0 = make_cache_key('%s-0' % cache_key)
         cache_key_1 = make_cache_key('%s-1' % cache_key)
 
-        self.assertTrue(make_cache_key(cache_key) in cache)
-        self.assertTrue(cache_key_0 in cache)
-        self.assertTrue(cache_key_1 in cache)
-        self.assertFalse(make_cache_key('%s-2' % cache_key) in cache)
+        self.assertIn(make_cache_key(cache_key), cache)
+        self.assertIn(cache_key_0, cache)
+        self.assertIn(cache_key_1, cache)
+        self.assertNotIn(make_cache_key('%s-2' % cache_key), cache)
 
         # Verify the contents of the stored data.
         stored_data = b''.join(cache.get(cache_key_0) + cache.get(cache_key_1))
         self.assertEqual(stored_data, pickled_data)
 
         # Try fetching the data we stored.
-        cache_func.spy.reset_calls()
-
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
         self.assertEqual(result, data)
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_large_files_uncompressed_off_by_one(self):
+    def test_with_large_files_uncompressed_off_by_one(self):
         """Testing cache_memoize with large files without compression and
-        one byte larger than an even chunk size."""
+        one byte larger than an even chunk size
+        """
         cache_key = 'abc123'
 
         # This takes into account the size of the pickle data, and will
         # get us to just barely 3 chunks of data in cache.
-        data = self._build_test_chunk_data(num_chunks=2)[0] + 'x'
+        data = self.build_test_chunk_data(num_chunks=2)[0] + 'x'
         pickled_data = pickle.dumps(data, protocol=0)
 
         def cache_func():
             return data
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
         self.assertEqual(result, data)
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
         cache_key_0 = make_cache_key('%s-0' % cache_key)
         cache_key_1 = make_cache_key('%s-1' % cache_key)
         cache_key_2 = make_cache_key('%s-2' % cache_key)
 
-        self.assertTrue(make_cache_key(cache_key) in cache)
-        self.assertTrue(cache_key_0 in cache)
-        self.assertTrue(cache_key_1 in cache)
-        self.assertTrue(cache_key_2 in cache)
-        self.assertFalse(make_cache_key('%s-3' % cache_key) in cache)
+        self.assertIn(make_cache_key(cache_key), cache)
+        self.assertIn(cache_key_0, cache)
+        self.assertIn(cache_key_1, cache)
+        self.assertIn(cache_key_2, cache)
+        self.assertNotIn(make_cache_key('%s-3' % cache_key), cache)
 
         # Verify the contents of the stored data.
         stored_data = b''.join(cache.get(cache_key_0) +
@@ -143,47 +179,44 @@ class CacheTests(SpyAgency, TestCase):
         self.assertEqual(stored_data, pickled_data)
 
         # Try fetching the data we stored.
-        cache_func.spy.reset_calls()
-
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
         self.assertEqual(result, data)
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_large_files_compressed(self):
+    def test_with_large_files_compressed(self):
         """Testing cache_memoize with large files with compression"""
         cache_key = 'abc123'
 
-        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
+        data, pickled_data = self.build_test_chunk_data(num_chunks=2)
 
         def cache_func():
             return data
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=True)
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
         cache_key_0 = make_cache_key('%s-0' % cache_key)
 
-        self.assertTrue(make_cache_key(cache_key) in cache)
-        self.assertTrue(cache_key_0 in cache)
-        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)
-        self.assertFalse(make_cache_key('%s-2' % cache_key) in cache)
+        self.assertIn(make_cache_key(cache_key), cache)
+        self.assertIn(cache_key_0, cache)
+        self.assertNotIn(make_cache_key('%s-1' % cache_key), cache)
+        self.assertNotIn(make_cache_key('%s-2' % cache_key), cache)
 
         # Verify the contents of the stored data.
         stored_data = cache.get(cache_key_0)[0]
         self.assertEqual(stored_data, zlib.compress(pickled_data))
 
         # Try fetching the data we stored.
-        cache_func.spy.reset_calls()
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=True)
         self.assertEqual(result, data)
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_large_files_load_uncompressed(self):
+    def test_with_large_files_load_uncompressed(self):
         """Testing cache_memoize with large files without compression and
         loading data
         """
@@ -191,7 +224,7 @@ class CacheTests(SpyAgency, TestCase):
 
         # This takes into account the size of the pickle data, and will
         # get us to exactly 2 chunks of data in cache.
-        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
+        data, pickled_data = self.build_test_chunk_data(num_chunks=2)
 
         cache.set(make_cache_key(cache_key), '2')
         cache.set(make_cache_key('%s-0' % cache_key),
@@ -202,22 +235,22 @@ class CacheTests(SpyAgency, TestCase):
         def cache_func():
             return ''
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
         self.assertEqual(result, data)
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyNotCalled(cache_func)
 
-    def test_cache_memoize_large_files_load_compressed(self):
+    def test_with_large_files_load_compressed(self):
         """Testing cache_memoize with large files with compression and
         loading cached data
         """
         cache_key = 'abc123'
 
-        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
+        data, pickled_data = self.build_test_chunk_data(num_chunks=2)
         stored_data = zlib.compress(pickled_data)
-        self.assertTrue(len(stored_data) < CACHE_CHUNK_SIZE)
+        self.assertLess(len(stored_data), CACHE_CHUNK_SIZE)
 
         cache.set(make_cache_key(cache_key), '1')
         cache.set(make_cache_key('%s-0' % cache_key), [stored_data])
@@ -225,21 +258,21 @@ class CacheTests(SpyAgency, TestCase):
         def cache_func():
             return ''
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=True)
         self.assertEqual(result, data)
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyNotCalled(cache_func)
 
-    def test_cache_memoize_large_files_missing_chunk(self):
+    def test_with_large_files_missing_chunk(self):
         """Testing cache_memoize with loading large files with missing chunks
         """
         cache_key = 'abc123'
 
         # This takes into account the size of the pickle data, and will
         # get us to exactly 2 chunks of data in cache.
-        data, pickled_data = self._build_test_chunk_data(num_chunks=2)
+        data, pickled_data = self.build_test_chunk_data(num_chunks=2)
 
         cache.set(make_cache_key(cache_key), '2')
         cache.set(make_cache_key('%s-0' % cache_key),
@@ -248,21 +281,24 @@ class CacheTests(SpyAgency, TestCase):
         def cache_func():
             return data
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize(cache_key, cache_func, large_data=True,
                                compress_large_data=False)
-        self.assertEqual(len(result), len(data))
         self.assertEqual(result, data)
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_iter_uncompressed(self):
+
+class CacheMemoizeIterTests(BaseCacheTestCase):
+    """Unit tests for cache_memoize_iter."""
+
+    def test_without_compression(self):
         """Testing cache_memoize_iter without compression"""
         cache_key = 'abc123'
         data_yielded = []
 
-        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
-        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)
+        data1, pickled_data_1 = self.build_test_chunk_data(num_chunks=2)
+        data2, pickled_data_2 = self.build_test_chunk_data(num_chunks=2)
 
         def cache_func():
             data_yielded.append('data1')
@@ -271,7 +307,7 @@ class CacheTests(SpyAgency, TestCase):
             data_yielded.append('data2')
             yield data2
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize_iter(cache_key, cache_func,
                                     compress_large_data=False)
@@ -287,7 +323,7 @@ class CacheTests(SpyAgency, TestCase):
         with self.assertRaises(StopIteration):
             next(result)
 
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
         cache_key_main = make_cache_key(cache_key)
         cache_key_0 = make_cache_key('%s-0' % cache_key)
@@ -295,12 +331,12 @@ class CacheTests(SpyAgency, TestCase):
         cache_key_2 = make_cache_key('%s-2' % cache_key)
         cache_key_3 = make_cache_key('%s-3' % cache_key)
 
-        self.assertTrue(cache_key_main in cache)
-        self.assertTrue(cache_key_0 in cache)
-        self.assertTrue(cache_key_1 in cache)
-        self.assertTrue(cache_key_2 in cache)
-        self.assertTrue(cache_key_3 in cache)
-        self.assertFalse(make_cache_key('%s-4' % cache_key) in cache)
+        self.assertIn(cache_key_main, cache)
+        self.assertIn(cache_key_0, cache)
+        self.assertIn(cache_key_1, cache)
+        self.assertIn(cache_key_2, cache)
+        self.assertIn(cache_key_3, cache)
+        self.assertNotIn(make_cache_key('%s-4' % cache_key), cache)
 
         # Verify the contents of the stored data.
         stored_data = b''.join(cache.get(cache_key_0) +
@@ -311,7 +347,6 @@ class CacheTests(SpyAgency, TestCase):
         self.assertEqual(stored_data, pickled_data_1 + pickled_data_2)
 
         # Try fetching the data we stored.
-        cache_func.spy.reset_calls()
         data_yielded = []
 
         result = cache_memoize_iter(cache_key, cache_func,
@@ -325,17 +360,17 @@ class CacheTests(SpyAgency, TestCase):
             next(result)
 
         self.assertEqual(data_yielded, [])
-        self.assertFalse(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
-    def test_cache_memoize_iter_compressed(self):
+    def test_with_compression(self):
         """Testing cache_memoize_iter with compression"""
         cache_key = 'abc123'
         data_yielded = []
 
         # This takes into account the size of the pickle data, and will
         # get us to exactly 2 chunks of data in cache, each.
-        data1, pickled_data_1 = self._build_test_chunk_data(num_chunks=2)
-        data2, pickled_data_2 = self._build_test_chunk_data(num_chunks=2)
+        data1, pickled_data_1 = self.build_test_chunk_data(num_chunks=2)
+        data2, pickled_data_2 = self.build_test_chunk_data(num_chunks=2)
 
         def cache_func():
             data_yielded.append('data1')
@@ -344,7 +379,7 @@ class CacheTests(SpyAgency, TestCase):
             data_yielded.append('data2')
             yield data2
 
-        self.spy_on(cache_func, call_original=True)
+        self.spy_on(cache_func)
 
         result = cache_memoize_iter(cache_key, cache_func,
                                     compress_large_data=True)
@@ -360,14 +395,14 @@ class CacheTests(SpyAgency, TestCase):
         with self.assertRaises(StopIteration):
             next(result)
 
-        self.assertTrue(cache_func.spy.called)
+        self.assertSpyCallCount(cache_func, 1)
 
         cache_key_main = make_cache_key(cache_key)
         cache_key_0 = make_cache_key('%s-0' % cache_key)
 
-        self.assertTrue(cache_key_main in cache)
-        self.assertTrue(cache_key_0 in cache)
-        self.assertFalse(make_cache_key('%s-1' % cache_key) in cache)
+        self.assertIn(cache_key_main, cache)
+        self.assertIn(cache_key_0, cache)
+        self.assertNotIn(make_cache_key('%s-1' % cache_key), cache)
 
         # Verify the contents of the stored data.
         self.assertEqual(cache.get(cache_key_main), '1')
@@ -375,7 +410,6 @@ class CacheTests(SpyAgency, TestCase):
                          zlib.compress(pickled_data_1 + pickled_data_2))
 
         # Try fetching the data we stored.
-        cache_func.spy.reset_calls()
         data_yielded = []
 
         result = cache_memoize_iter(cache_key, cache_func,
@@ -388,18 +422,4 @@ class CacheTests(SpyAgency, TestCase):
             next(result)
 
         self.assertEqual(data_yielded, [])
-        self.assertFalse(cache_func.spy.called)
-
-    def _build_test_chunk_data(self, num_chunks):
-        """Build enough test data to fill up the specified number of chunks.
-
-        This takes into account the size of the pickle data, and will
-        get us to exactly the specified number of chunks of data in the cache.
-        """
-        data = 'x' * (CACHE_CHUNK_SIZE * num_chunks - 3 * num_chunks)
-        pickled_data = pickle.dumps(data, protocol=0)
-
-        self.assertTrue(pickled_data.startswith(b'Vxxxxxxxx'))
-        self.assertEqual(len(pickled_data), CACHE_CHUNK_SIZE * num_chunks)
-
-        return data, pickled_data
+        self.assertSpyCallCount(cache_func, 1)
