diff --git a/djblets/auth/forms.py b/djblets/auth/forms.py
index ee93c8378d652eced29516fb59754be3c9f7e782..9ed1c82721c8b498bb1fd016a41d31fa298d24f6 100644
--- a/djblets/auth/forms.py
+++ b/djblets/auth/forms.py
@@ -28,10 +28,11 @@ from __future__ import unicode_literals
 
 from django import forms
 from django.contrib import auth
+from django.contrib.auth.models import User
 from django.core.exceptions import ValidationError
 from django.utils import six
 
-from djblets.util.misc import get_object_or_none
+from djblets.db.query import get_object_or_none
 
 
 class RegistrationForm(forms.Form):
diff --git a/djblets/auth/views.py b/djblets/auth/views.py
index 0bdcffd531323356b3494b468559ad26d80a79b2..19d1b4a53aea53b8cdf6f4a7b35506640a62223f 100644
--- a/djblets/auth/views.py
+++ b/djblets/auth/views.py
@@ -33,8 +33,7 @@ from django.template.context import RequestContext
 from django.http import HttpResponseRedirect
 
 from djblets.auth.forms import RegistrationForm
-from djblets.auth.util import (validate_test_cookie,
-                               validate_old_password)
+from djblets.auth.util import validate_test_cookie
 
 
 ###########################
diff --git a/djblets/cache/backend.py b/djblets/cache/backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..21faeeefa71b932f7cd2b8d37f01266579c15c92
--- /dev/null
+++ b/djblets/cache/backend.py
@@ -0,0 +1,173 @@
+from __future__ import unicode_literals
+from hashlib import md5
+import logging
+import zlib
+
+from django.conf import settings
+from django.core.cache import cache
+from django.contrib.sites.models import Site
+
+from djblets.cache.errors import MissingChunkError
+from djblets.util.compat.six.moves import (cPickle as pickle,
+                                           cStringIO as StringIO)
+
+
+DEFAULT_EXPIRATION_TIME = 60 * 60 * 24 * 30 # 1 month
+CACHE_CHUNK_SIZE = 2**20 - 1024 # almost 1M (memcached's slab limit)
+
+# memcached key size constraint (typically 250, but leave a few bytes for the
+# large data handling)
+MAX_KEY_SIZE = 240
+
+
+def _cache_fetch_large_data(cache, key, compress_large_data):
+    chunk_count = cache.get(make_cache_key(key))
+    data = []
+
+    chunk_keys = [make_cache_key('%s-%d' % (key, i))
+                  for i in range(int(chunk_count))]
+    chunks = cache.get_many(chunk_keys)
+    for chunk_key in chunk_keys:
+        try:
+            data.append(chunks[chunk_key][0])
+        except KeyError:
+            logging.debug('Cache miss for key %s.' % chunk_key)
+            raise MissingChunkError
+
+    data = b''.join(data)
+
+    if compress_large_data:
+        data = zlib.decompress(data)
+
+    try:
+        unpickler = pickle.Unpickler(StringIO(data))
+        data = unpickler.load()
+    except Exception as e:
+        logging.warning('Unpickle error for cache key "%s": %s.' % (key, e))
+        raise e
+
+    return data
+
+
+def _cache_store_large_data(cache, key, data, expiration, compress_large_data):
+    # We store large data in the cache broken into chunks that are 1M in size.
+    # To do this easily, we first pickle the data and compress it with zlib.
+    # This gives us a string which can be chunked easily. These are then stored
+    # individually in the cache as single-element lists (so the cache backend
+    # doesn't try to convert binary data to utf8). The number of chunks needed
+    # is stored in the cache under the unadorned key
+    file = StringIO()
+    pickler = pickle.Pickler(file)
+    pickler.dump(data)
+    data = file.getvalue()
+
+    if compress_large_data:
+        data = zlib.compress(data)
+
+    i = 0
+    while len(data) > CACHE_CHUNK_SIZE:
+        chunk = data[0:CACHE_CHUNK_SIZE]
+        data = data[CACHE_CHUNK_SIZE:]
+        cache.set(make_cache_key('%s-%d' % (key, i)), [chunk], expiration)
+        i += 1
+    cache.set(make_cache_key('%s-%d' % (key, i)), [data], expiration)
+
+    cache.set(make_cache_key(key), '%d' % (i + 1), expiration)
+
+
+def cache_memoize(key, lookup_callable,
+                  expiration=getattr(settings, 'CACHE_EXPIRATION_TIME',
+                                     DEFAULT_EXPIRATION_TIME),
+                  force_overwrite=False,
+                  large_data=False,
+                  compress_large_data=True):
+    """Memoize the results of a callable inside the configured cache.
+
+    Keyword arguments:
+    expiration          -- The expiration time for the key.
+    force_overwrite     -- If True, the value will always be computed and stored
+                           regardless of whether it exists in the cache already.
+    large_data          -- If True, the resulting data will be pickled,
+                           zlib-compressed, and (potentially) split up into
+                           megabyte-sized chunks.
+                           This is useful for very large, computationally
+                           intensive hunks of data which we don't want to store
+                           in a database due to the way things are accessed.
+    compress_large_data -- Compresses the data with zlib compression when
+                           large_data is True.
+    """
+    if large_data:
+        if not force_overwrite and make_cache_key(key) in cache:
+            try:
+                data = _cache_fetch_large_data(cache, key, compress_large_data)
+                return data
+            except Exception as e:
+                logging.warning('Failed to fetch large data from cache for '
+                                'key %s: %s.' % (key, e))
+        else:
+            logging.debug('Cache miss for key %s.' % key)
+
+        data = lookup_callable()
+        _cache_store_large_data(cache, key, data, expiration,
+                                compress_large_data)
+        return data
+
+    else:
+        key = make_cache_key(key)
+        if not force_overwrite and key in cache:
+            return cache.get(key)
+        data = lookup_callable()
+
+        # Most people will be using memcached, and memcached has a limit of 1MB.
+        # Data this big should be broken up somehow, so let's warn about this.
+        # Users should hopefully be using large_data=True in this case.
+        # XXX - since 'data' may be a sequence that's not a string/unicode,
+        #       this can fail. len(data) might be something like '6' but the
+        #       data could exceed a megabyte. The best way to catch this would
+        #       be an exception, but while python-memcached defines an exception
+        #       type for this, it never uses it, choosing instead to fail
+        #       silently. WTF.
+        if len(data) >= CACHE_CHUNK_SIZE:
+            logging.warning('Cache data for key "%s" (length %s) may be too '
+                            'big for the cache.' % (key, len(data)))
+
+        try:
+            cache.set(key, data, expiration)
+        except:
+            pass
+        return data
+
+
+def make_cache_key(key):
+    """Creates a cache key guaranteed to avoid conflicts and size limits.
+
+    The cache key will be prefixed by the site's domain, and will be
+    changed to an MD5SUM if it's larger than the maximum key size.
+    """
+    try:
+        site = Site.objects.get_current()
+
+        # The install has a Site app, so prefix the domain to the key.
+        # If a SITE_ROOT is defined, also include that, to allow for multiple
+        # instances on the same host.
+        site_root = getattr(settings, 'SITE_ROOT', None)
+
+        if site_root:
+            key = '%s:%s:%s' % (site.domain, site_root, key)
+        else:
+            key = '%s:%s' % (site.domain, key)
+    except:
+        # The install doesn't have a Site app, so use the key as-is.
+        pass
+
+    # Adhere to memcached key size limit
+    if len(key) > MAX_KEY_SIZE:
+        digest = md5(key.encode('utf-8')).hexdigest();
+
+        # Replace the excess part of the key with a digest of the key
+        key = key[:MAX_KEY_SIZE - len(digest)] + digest
+
+    # Make sure this is a non-unicode string, in order to prevent errors
+    # with some backends.
+    key = key.encode('utf-8')
+
+    return key
diff --git a/djblets/cache/backend_compat.py b/djblets/cache/backend_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..260df32a1680640d5957766277d99de902ea5411
--- /dev/null
+++ b/djblets/cache/backend_compat.py
@@ -0,0 +1,56 @@
+from __future__ import unicode_literals
+import logging
+
+from django.core.cache import (DEFAULT_CACHE_ALIAS, parse_backend_uri,
+                               InvalidCacheBackendError)
+
+
+BACKEND_CLASSES = {
+    'db': 'db.DatabaseCache',
+    'dummy': 'dummy.DummyCache',
+    'file': 'filebased.FileBasedCache',
+    'locmem': 'locmem.LocMemCache',
+    'memcached': 'memcached.CacheClass',
+}
+
+
+def normalize_cache_backend(cache_backend):
+    """Returns a new-style CACHES dictionary from any given cache_backend.
+
+    Django has supported two formats for a cache backend. The old-style
+    CACHE_BACKEND string, and the new-style CACHES dictionary.
+
+    This function will accept either as input and return a cache backend in the
+    form of a CACHES dictionary as a result. The result won't be a full-on
+    CACHES, with named cache entries inside. Rather, it will be a cache entry.
+
+    If a CACHES dictionary is passed, the "default" cache will be the result.
+    """
+    if not cache_backend:
+        return {}
+
+    if isinstance(cache_backend, dict):
+        if DEFAULT_CACHE_ALIAS in cache_backend:
+            return cache_backend[DEFAULT_CACHE_ALIAS]
+
+        return {}
+
+    try:
+        engine, host, params = parse_backend_uri(cache_backend)
+    except InvalidCacheBackendError as e:
+        logging.error('Invalid cache backend (%s) found while loading '
+                      'siteconfig: %s' % (cache_backend, e))
+        return {}
+
+    if engine in BACKEND_CLASSES:
+        engine = 'django.core.cache.backends.%s' % BACKEND_CLASSES[engine]
+    else:
+        engine = '%s.CacheClass' % engine
+
+    defaults = {
+        'BACKEND': engine,
+        'LOCATION': host,
+    }
+    defaults.update(params)
+
+    return defaults
diff --git a/djblets/cache/context_processors.py b/djblets/cache/context_processors.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe0ee6432c05f967034bfa6ad500ef752eb1aa08
--- /dev/null
+++ b/djblets/cache/context_processors.py
@@ -0,0 +1,30 @@
+from __future__ import unicode_literals
+
+from django.conf import settings
+
+
+def media_serial(request):
+    """
+    Exposes a media serial number that can be appended to a media filename
+    in order to make a URL that can be cached forever without fear of change.
+    The next time the file is updated and the server is restarted, a new
+    path will be accessed and cached.
+
+    This returns the value of settings.MEDIA_SERIAL, which must either be
+    set manually or ideally should be set to the value of
+    djblets.cache.serials.generate_media_serial().
+    """
+    return {'MEDIA_SERIAL': getattr(settings, "MEDIA_SERIAL", "")}
+
+
+def ajax_serial(request):
+    """
+    Exposes a serial number that can be appended to filenames involving
+    dynamic loads of URLs in order to make a URL that can be cached forever
+    without fear of change.
+
+    This returns the value of settings.AJAX_SERIAL, which must either be
+    set manually or ideally should be set to the value of
+    djblets.cache.serials.generate_ajax_serial().
+    """
+    return {'AJAX_SERIAL': getattr(settings, "AJAX_SERIAL", "")}
diff --git a/djblets/cache/errors.py b/djblets/cache/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a1dc97094c877bde0d96469f64c2ac4f8623e35
--- /dev/null
+++ b/djblets/cache/errors.py
@@ -0,0 +1,2 @@
+class MissingChunkError(Exception):
+    pass
diff --git a/djblets/cache/serials.py b/djblets/cache/serials.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca7e9c8497d160c3019db0dd8fd796e31f80dc8b
--- /dev/null
+++ b/djblets/cache/serials.py
@@ -0,0 +1,102 @@
+from __future__ import unicode_literals
+import logging
+import os
+
+from django.conf import settings
+from django.utils import importlib
+
+
+def generate_media_serial():
+    """
+    Generates a media serial number that can be appended to a media filename
+    in order to make a URL that can be cached forever without fear of change.
+    The next time the file is updated and the server is restarted, a new
+    path will be accessed and cached.
+
+    This will crawl the media files (using directories in MEDIA_SERIAL_DIRS if
+    specified, or all of STATIC_ROOT otherwise), figuring out the latest
+    timestamp, and return that value.
+    """
+    MEDIA_SERIAL = getattr(settings, "MEDIA_SERIAL", 0)
+
+    if not MEDIA_SERIAL:
+        media_dirs = getattr(settings, "MEDIA_SERIAL_DIRS", ["."])
+
+        for media_dir in media_dirs:
+            media_path = os.path.join(settings.STATIC_ROOT, media_dir)
+
+            for root, dirs, files in os.walk(media_path):
+                for name in files:
+                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
+
+                    if mtime > MEDIA_SERIAL:
+                        MEDIA_SERIAL = mtime
+
+        setattr(settings, "MEDIA_SERIAL", MEDIA_SERIAL)
+
+
+def generate_ajax_serial():
+    """
+    Generates a serial number that can be appended to filenames involving
+    dynamic loads of URLs in order to make a URL that can be cached forever
+    without fear of change.
+
+    This will crawl the template files (using directories in TEMPLATE_DIRS),
+    figuring out the latest timestamp, and return that value.
+    """
+    AJAX_SERIAL = getattr(settings, "AJAX_SERIAL", 0)
+
+    if not AJAX_SERIAL:
+        template_dirs = getattr(settings, "TEMPLATE_DIRS", ["."])
+
+        for template_path in template_dirs:
+            for root, dirs, files in os.walk(template_path):
+                for name in files:
+                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
+
+                    if mtime > AJAX_SERIAL:
+                        AJAX_SERIAL = mtime
+
+        setattr(settings, "AJAX_SERIAL", AJAX_SERIAL)
+
+
+def generate_locale_serial(packages):
+    """Generate a locale serial for the given set of packages.
+
+    This will be equal to the most recent mtime of all the .mo files that
+    contribute to the localization of the given packages.
+    """
+    serial = 0
+
+    paths = []
+    for package in packages:
+        try:
+            p = importlib.import_module(package)
+            path = os.path.join(os.path.dirname(p.__file__), 'locale')
+            paths.append(path)
+        except Exception as e:
+            logging.error(
+                'Failed to import package %s to compute locale serial: %s'
+                % (package, e))
+
+    for locale_path in paths:
+        for root, dirs, files in os.walk(locale_path):
+            for name in files:
+                if name.endswith('.mo'):
+                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
+                    if mtime > serial:
+                        serial = mtime
+
+    return serial
+
+
+def generate_cache_serials():
+    """
+    Wrapper around generate_media_serial and generate_ajax_serial to
+    generate all serial numbers in one go.
+
+    This should be called early in the startup, such as in the site's
+    main urls.py.
+    """
+    generate_media_serial()
+    generate_ajax_serial()
diff --git a/djblets/db/evolution.py b/djblets/db/evolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7fb5d0bc870180b4808b77abb6f24fd19a31cd5
--- /dev/null
+++ b/djblets/db/evolution.py
@@ -0,0 +1,60 @@
+#
+# dbevolution.py -- Helpers for database evolutions
+#
+# Copyright (c) 2008-2009  Christian Hammond
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+from __future__ import unicode_literals
+
+from django_evolution.mutations import BaseMutation
+
+
+class FakeChangeFieldType(BaseMutation):
+    """
+    Changes the type of the field to a similar type.
+    This is intended only when the new type is really a version of the
+    old type, such as a subclass of that Field object. The two fields
+    should be compatible or there could be migration issues.
+    """
+    def __init__(self, model_name, field_name, new_type):
+        self.model_name = model_name
+        self.field_name = field_name
+        self.new_type = new_type
+
+    def __repr__(self):
+        return "FakeChangeFieldType('%s', '%s', '%s')" % \
+            (self.model_name, self.field_name, self.new_type)
+
+    def simulate(self, app_label, proj_sig):
+        app_sig = proj_sig[app_label]
+        model_sig = app_sig[self.model_name]
+        field_dict = model_sig['fields']
+        field_sig = field_dict[self.field_name]
+
+        field_sig['field_type'] = self.new_type
+
+    def mutate(self, app_label, proj_sig):
+        # We can just call simulate, since it does the same thing.
+        # We're not actually generating SQL, but rather tricking
+        # Django Evolution.
+        self.simulate(app_label, proj_sig)
+        return ""
diff --git a/djblets/db/fields.py b/djblets/db/fields.py
new file mode 100644
index 0000000000000000000000000000000000000000..e11f013a30ad63f2d1e3ca7d46c2b5db78d05c27
--- /dev/null
+++ b/djblets/db/fields.py
@@ -0,0 +1,359 @@
+#
+# fields.py -- Model fields.
+#
+# Copyright (c) 2007-2008  Christian Hammond
+# Copyright (c) 2007-2008  David Trowbridge
+# Copyright (c) 2008-2013  Beanbag, Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import unicode_literals
+from ast import literal_eval
+from datetime import datetime
+import base64
+import json
+import logging
+
+from django.conf import settings
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db import models
+from django.db.models import F
+from django.utils import six
+from django.utils.encoding import smart_unicode
+
+from djblets.db.validators import validate_json
+from djblets.util.dates import get_tz_aware_utcnow
+
+
+class Base64DecodedValue(str):
+    """
+    A subclass of string that can be identified by Base64Field, in order
+    to prevent double-encoding or double-decoding.
+    """
+    pass
+
+
+class Base64FieldCreator(object):
+    def __init__(self, field):
+        self.field = field
+
+    def __set__(self, obj, value):
+        pk_val = obj._get_pk_val(obj.__class__._meta)
+        pk_set = pk_val is not None and smart_unicode(pk_val) != ''
+
+        if (isinstance(value, Base64DecodedValue) or not pk_set):
+            obj.__dict__[self.field.name] = base64.encodestring(value)
+        else:
+            obj.__dict__[self.field.name] = value
+
+        setattr(obj, "%s_initted" % self.field.name, True)
+
+    def __get__(self, obj, type=None):
+        if obj is None:
+            raise AttributeError('Can only be accessed via an instance.')
+
+        value = obj.__dict__[self.field.name]
+
+        if value is None:
+            return None
+        else:
+            return Base64DecodedValue(base64.decodestring(value))
+
+
+class Base64Field(models.TextField):
+    """
+    A subclass of TextField that encodes its data as base64 in the database.
+    This is useful if you're dealing with unknown encodings and must guarantee
+    that no modifications to the text occur and that you can read/write
+    the data in any database with any encoding.
+    """
+    serialize_to_string = True
+
+    def contribute_to_class(self, cls, name):
+        super(Base64Field, self).contribute_to_class(cls, name)
+        setattr(cls, self.name, Base64FieldCreator(self))
+
+    def get_db_prep_value(self, value, connection=None, prepared=False):
+        if isinstance(value, Base64DecodedValue):
+            value = base64.encodestring(value)
+
+        return value
+
+    def save_form_data(self, instance, data):
+        setattr(instance, self.name, Base64DecodedValue(data))
+
+    def to_python(self, value):
+        if isinstance(value, Base64DecodedValue):
+            return value
+        else:
+            return Base64DecodedValue(base64.decodestring(value))
+
+    def value_to_string(self, obj):
+        value = self._get_val_from_obj(obj)
+
+        if isinstance(value, Base64DecodedValue):
+            return base64.encodestring(value)
+        else:
+            return value
+
+
+class ModificationTimestampField(models.DateTimeField):
+    """
+    A subclass of DateTimeField that only auto-updates the timestamp when
+    updating an existing object or when the value of the field is None. This
+    specialized field is equivalent to DateTimeField's auto_now=True, except
+    it allows for custom timestamp values (needed for
+    serialization/deserialization).
+    """
+    def __init__(self, verbose_name=None, name=None, **kwargs):
+        kwargs.update({
+            'editable': False,
+            'blank': True,
+        })
+        models.DateTimeField.__init__(self, verbose_name, name, **kwargs)
+
+    def pre_save(self, model, add):
+        if not add or getattr(model, self.attname) is None:
+
+            if settings.USE_TZ:
+                value = get_tz_aware_utcnow()
+            else:
+                value = datetime.now()
+
+            setattr(model, self.attname, value)
+            return value
+
+        return super(ModificationTimestampField, self).pre_save(model, add)
+
+    def get_internal_type(self):
+        return "DateTimeField"
+
+
+class JSONField(models.TextField):
+    """
+    A field for storing JSON-encoded data. The data is accessible as standard
+    Python data types and is transparently encoded/decoded to/from a JSON
+    string in the database.
+    """
+    serialize_to_string = True
+    default_validators = [validate_json]
+
+    def __init__(self, verbose_name=None, name=None,
+                 encoder=DjangoJSONEncoder(), **kwargs):
+        blank = kwargs.pop('blank', True)
+        models.TextField.__init__(self, verbose_name, name, blank=blank,
+                                  **kwargs)
+        self.encoder = encoder
+
+    def contribute_to_class(self, cls, name):
+        def get_json(model_instance):
+            return self.dumps(getattr(model_instance, self.attname, None))
+
+        def set_json(model_instance, json):
+            setattr(model_instance, self.attname, self.loads(json))
+
+        super(JSONField, self).contribute_to_class(cls, name)
+
+        setattr(cls, "get_%s_json" % self.name, get_json)
+        setattr(cls, "set_%s_json" % self.name, set_json)
+
+        models.signals.post_init.connect(self.post_init, sender=cls)
+
+    def pre_save(self, model_instance, add):
+        return self.dumps(getattr(model_instance, self.attname, None))
+
+    def post_init(self, instance=None, **kwargs):
+        value = self.value_from_object(instance)
+
+        if value:
+            value = self.loads(value)
+        else:
+            value = {}
+
+        setattr(instance, self.attname, value)
+
+    def get_db_prep_save(self, value, *args, **kwargs):
+        if not isinstance(value, six.string_types):
+            value = self.dumps(value)
+
+        return super(JSONField, self).get_db_prep_save(value, *args, **kwargs)
+
+    def value_to_string(self, obj):
+        return self.dumps(self.value_from_object(obj))
+
+    def dumps(self, data):
+        if isinstance(data, six.string_types):
+            return data
+        else:
+            return self.encoder.encode(data)
+
+    def loads(self, val):
+        try:
+            val = json.loads(val, encoding=settings.DEFAULT_CHARSET)
+
+            # XXX We need to investigate why this is happening once we have
+            #     a solid repro case.
+            if isinstance(val, six.string_types):
+                logging.warning("JSONField decode error. Expected dictionary, "
+                                "got string for input '%s'" % val)
+                # For whatever reason, we may have gotten back
+                val = json.loads(val, encoding=settings.DEFAULT_CHARSET)
+        except ValueError:
+            # There's probably embedded unicode markers (like u'foo') in the
+            # string. We have to eval it.
+            try:
+                val = literal_eval(val)
+            except Exception as e:
+                logging.error('Failed to eval JSONField data "%r": %s'
+                              % (val, e))
+                val = {}
+
+            if isinstance(val, six.string_types):
+                logging.warning('JSONField decode error after literal_eval: '
+                                'Expected dictionary, got string: %r' % val)
+                val = {}
+
+        return val
+
+
+class CounterField(models.IntegerField):
+    """A field that provides atomic counter updating and smart initialization.
+
+    The CounterField makes it easy to atomically update an integer,
+    incrementing or decrementing it, without race conditions or conflicts.
+    It can update a single instance at a time, or a batch of objects at once.
+
+    CounterField is useful for storing counts of objects, reducing the number
+    of queries performed. This requires that the calling code properly
+    increments or decrements at all the right times, of course.
+
+    This takes an optional ``initializer`` parameter that, if provided, can
+    be used to auto-populate the field the first time the model instance is
+    loaded, perhaps based on querying a number of related objects. The value
+    passed to ``initializer`` must be a function taking the model instance
+    as a parameter, and must return an integer.
+
+    The model instance will gain four new functions:
+
+        * ``increment_{field_name}`` - Atomically increment by one.
+        * ``decrement_{field_name}`` - Atomically decrement by one.
+        * ``reload_{field_name}`` - Reload the value in this instance from the
+                                    database.
+        * ``reinit_{field_name}`` - Re-initializes the stored field using the
+                                    initializer function.
+
+    The field on the class (not the instance) provides two functions for
+    batch-updating models:
+
+        * ``increment`` - Takes a queryset and increments this field for
+                          each object.
+        * ``decrement`` - Takes a queryset and decrements this field for
+                          each object.
+    """
+
+    def __init__(self, verbose_name=None, name=None,
+                 initializer=None, default=None, **kwargs):
+        kwargs.update({
+            'blank': True,
+            'null': True,
+        })
+
+        super(CounterField, self).__init__(verbose_name, name, default=default,
+                                           **kwargs)
+
+        self._initializer = initializer
+        self._locks = {}
+
+    def increment(self, queryset, increment_by=1):
+        """Increments this field on every object in the provided queryset."""
+        queryset.update(**{self.attname: F(self.attname) + increment_by})
+
+    def decrement(self, queryset, decrement_by=1):
+        """Decrements this field on every object in the provided queryset."""
+        queryset.update(**{self.attname: F(self.attname) - decrement_by})
+
+    def contribute_to_class(self, cls, name):
+        def _increment(model_instance, reload_object=True, increment_by=1):
+            """Increments this field by one."""
+            self.increment(cls.objects.filter(pk=model_instance.pk),
+                           increment_by)
+
+            if reload_object:
+                _reload(model_instance)
+
+        def _decrement(model_instance, reload_object=True, decrement_by=1):
+            """Decrements this field by one."""
+            self.decrement(cls.objects.filter(pk=model_instance.pk),
+                           decrement_by)
+
+            if reload_object:
+                _reload(model_instance)
+
+        def _reload(model_instance):
+            """Reloads the value in this instance from the database."""
+            q = cls.objects.filter(pk=model_instance.pk)
+            setattr(model_instance, self.attname,
+                    q.values(self.attname)[0][self.attname])
+
+        def _reinit(model_instance):
+            """Re-initializes the value in the database from the initializer."""
+            if not (model_instance.pk or self._initializer or
+                    six.callable(self._initializer)):
+                # We don't want to end up defaulting this to 0 if creating a
+                # new instance unless an initializer is provided. Instead,
+                # we'll want to handle this the next time the object is
+                # accessed.
+                return
+
+            if self._initializer and six.callable(self._initializer):
+                self._locks[model_instance] = 1
+                value = self._initializer(model_instance)
+                del self._locks[model_instance]
+            else:
+                value = 0
+
+            setattr(model_instance, self.attname, value)
+
+            if model_instance.pk:
+                model_instance.save()
+
+        super(CounterField, self).contribute_to_class(cls, name)
+
+        setattr(cls, 'increment_%s' % self.name, _increment)
+        setattr(cls, 'decrement_%s' % self.name, _decrement)
+        setattr(cls, 'reload_%s' % self.name, _reload)
+        setattr(cls, 'reinit_%s' % self.name, _reinit)
+        setattr(cls, self.attname, self)
+
+        models.signals.post_init.connect(self._post_init, sender=cls)
+
+    def _post_init(self, instance=None, **kwargs):
+        if not instance or instance in self._locks:
+            # Prevent the possibility of recursive lookups where this
+            # same CounterField on this same instance tries to initialize
+            # more than once. In this case, this will have the updated
+            # value shortly.
+            return
+
+        value = self.value_from_object(instance)
+
+        if value is None:
+            reinit = getattr(instance, 'reinit_%s' % self.name)
+            reinit()
diff --git a/djblets/db/managers.py b/djblets/db/managers.py
new file mode 100644
index 0000000000000000000000000000000000000000..31897d7640ab89a3ca694c37677e416c5142cdb7
--- /dev/null
+++ b/djblets/db/managers.py
@@ -0,0 +1,53 @@
+#
+# managers.py -- Managers for Django database models.
+#
+# Copyright (c) 2007-2013  Beanbag, Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+
+from __future__ import unicode_literals
+
+from django.db import models, IntegrityError
+
+
+class ConcurrencyManager(models.Manager):
+    """
+    A class designed to work around database concurrency issues.
+    """
+    def get_or_create(self, **kwargs):
+        """
+        A wrapper around get_or_create that makes a final attempt to get
+        the object if the creation fails.
+
+        This helps with race conditions in the database where, between the
+        original get() and the create(), another process created the object,
+        causing us to fail. We'll then execute a get().
+
+        This is still prone to race conditions, but they're even more rare.
+        A delete() would have to happen after the unexpected create() but
+        before the get().
+        """
+        try:
+            return super(ConcurrencyManager, self).get_or_create(**kwargs)
+        except IntegrityError:
+            kwargs.pop('defaults', None)
+            return self.get(**kwargs)
diff --git a/djblets/db/query.py b/djblets/db/query.py
new file mode 100644
index 0000000000000000000000000000000000000000..acc34c0fe780b49e92faa5a3816afac603e97e74
--- /dev/null
+++ b/djblets/db/query.py
@@ -0,0 +1,16 @@
+from __future__ import unicode_literals
+
+from django.db.models.manager import Manager
+
+
+def get_object_or_none(klass, *args, **kwargs):
+    if isinstance(klass, Manager):
+        manager = klass
+        klass = manager.model
+    else:
+        manager = klass._default_manager
+
+    try:
+        return manager.get(*args, **kwargs)
+    except klass.DoesNotExist:
+        return None
diff --git a/djblets/db/validators.py b/djblets/db/validators.py
new file mode 100644
index 0000000000000000000000000000000000000000..1738488a079b1a4da3fc69f0745a75f371e4fb8f
--- /dev/null
+++ b/djblets/db/validators.py
@@ -0,0 +1,19 @@
+from __future__ import unicode_literals
+import json
+
+from django.core.exceptions import ValidationError
+from django.utils import six
+
+
+def validate_json(value):
+    """Validates content going into a JSONField.
+
+    This will raise a ValidationError if the value is a string
+    (representing a serialized JSON payload, possibly from the admin UI)
+    and cannot be loaded properly.
+    """
+    if isinstance(value, six.string_types):
+        try:
+            json.loads(value)
+        except ValueError as e:
+            raise ValidationError(six.text_type(e), code='invalid')
diff --git a/djblets/extensions/base.py b/djblets/extensions/base.py
index 2592332ae6ef15c12fa2c12ea6f77a7836a6121c..e57267bd72f258531d4d902d93545209d50de96a 100644
--- a/djblets/extensions/base.py
+++ b/djblets/extensions/base.py
@@ -27,7 +27,7 @@ from __future__ import unicode_literals
 
 from djblets.extensions.extension import Extension, ExtensionInfo
 from djblets.extensions.hooks import ExtensionHook, ExtensionHookPoint
-from djblets.extensions.manager import ExtensionManager, get_extension_managers
+from djblets.extensions.manager import ExtensionManager
 from djblets.extensions.settings import Settings
 
 
diff --git a/djblets/extensions/manager.py b/djblets/extensions/manager.py
index ff21935667873775c7c9297a68a509467b691d96..85e78c3cc5698fd08ee4c45d42500cb3f69f1c01 100644
--- a/djblets/extensions/manager.py
+++ b/djblets/extensions/manager.py
@@ -48,6 +48,7 @@ from django.utils.module_loading import module_has_submodule
 from django_evolution.management.commands.evolve import Command as Evolution
 from setuptools.command import easy_install
 
+from djblets.cache.backend import make_cache_key
 from djblets.extensions.errors import (EnablingExtensionError,
                                        InstallExtensionError,
                                        InvalidExtensionError)
@@ -55,8 +56,7 @@ from djblets.extensions.extension import ExtensionInfo
 from djblets.extensions.models import RegisteredExtension
 from djblets.extensions.signals import (extension_initialized,
                                         extension_uninitialized)
-from djblets.util.misc import make_cache_key
-from djblets.util.urlresolvers import DynamicURLResolver
+from djblets.urls.resolvers import DynamicURLResolver
 
 
 class ExtensionManager(object):
diff --git a/djblets/extensions/models.py b/djblets/extensions/models.py
index 210f3237c6fd6e5cd3914fb23fce993017930341..21ccd163877a89e7fd6a4799dc34a831e4585358 100644
--- a/djblets/extensions/models.py
+++ b/djblets/extensions/models.py
@@ -28,8 +28,8 @@ from __future__ import unicode_literals
 from django.db import models
 from django.utils.encoding import python_2_unicode_compatible
 
+from djblets.db.fields import JSONField
 from djblets.extensions.errors import InvalidExtensionError
-from djblets.util.fields import JSONField
 
 
 @python_2_unicode_compatible
diff --git a/djblets/extensions/resources.py b/djblets/extensions/resources.py
index b4cbba3546fbc77b0c14da58930fd769a2e7ab5c..375b0c1f2e5c11cbfa62f34a846f2ec66cbe7279 100644
--- a/djblets/extensions/resources.py
+++ b/djblets/extensions/resources.py
@@ -7,7 +7,7 @@ from djblets.extensions.errors import (DisablingExtensionError,
                                        EnablingExtensionError,
                                        InvalidExtensionError)
 from djblets.extensions.models import RegisteredExtension
-from djblets.util.urlresolvers import DynamicURLResolver
+from djblets.urls.resolvers import DynamicURLResolver
 from djblets.webapi.decorators import (webapi_login_required,
                                        webapi_permission_required,
                                        webapi_request_fields)
diff --git a/djblets/extensions/staticfiles.py b/djblets/extensions/staticfiles.py
index 4b81c34cef952b0e49d61764e54cba04521addd4..7f70e0cd48fbff2c4132feace880c1f86cc3bcc0 100644
--- a/djblets/extensions/staticfiles.py
+++ b/djblets/extensions/staticfiles.py
@@ -5,7 +5,7 @@ import os
 from django.contrib.staticfiles.finders import BaseFinder
 from django.contrib.staticfiles.utils import get_files
 from django.core.files.storage import FileSystemStorage
-from pkg_resources import resource_exists, resource_filename
+from pkg_resources import resource_filename
 
 from djblets.extensions.manager import get_extension_managers
 
diff --git a/djblets/feedview/tests.py b/djblets/feedview/tests.py
index 1009105dbb2fa0e2ec5fb0e1bdf2ff645f93a1d7..caa544b6f5b0afbd605400d4d8efa1d150e00840 100644
--- a/djblets/feedview/tests.py
+++ b/djblets/feedview/tests.py
@@ -24,7 +24,7 @@
 
 from __future__ import unicode_literals
 
-from djblets.util.testing import TestCase
+from djblets.testing.testcases import TestCase
 
 
 class FeedViewTests(TestCase):
diff --git a/djblets/feedview/views.py b/djblets/feedview/views.py
index e93809bc7da2d3354da4da3df8c605efc7f86e95..ffd3f69ccbf7d3d783acbaf3dc0c6056f9d468ea 100644
--- a/djblets/feedview/views.py
+++ b/djblets/feedview/views.py
@@ -5,10 +5,10 @@ from django.shortcuts import render_to_response
 from django.template.context import RequestContext
 from django.template.loader import render_to_string
 
+from djblets.cache.backend import cache_memoize
 from djblets.util.compat.six.moves import http_client
 from djblets.util.compat.six.moves.urllib.error import URLError
 from djblets.util.compat.six.moves.urllib.request import urlopen
-from djblets.util.misc import cache_memoize
 
 
 DEFAULT_EXPIRATION = 2 * 24 * 60 * 60 # 2 days
diff --git a/djblets/forms/fields.py b/djblets/forms/fields.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba251992135a596036171d5d334f15582a5c7fe0
--- /dev/null
+++ b/djblets/forms/fields.py
@@ -0,0 +1,13 @@
+from __future__ import unicode_literals
+
+from django import forms
+import pytz
+
+
+TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones))
+
+
+class TimeZoneField(forms.ChoiceField):
+    """A form field that only allows pytz common timezones as the choices."""
+    def __init__(self, choices=TIMEZONE_CHOICES, *args, **kwargs):
+        super(TimeZoneField, self).__init__(choices, *args, **kwargs)
diff --git a/djblets/gravatars/__init__.py b/djblets/gravatars/__init__.py
index d8c7d8f0bddff8dc47527ac3c01f276ab2d342b4..a66ebce5bc49220fa7554a3fc06c7a67750f5025 100644
--- a/djblets/gravatars/__init__.py
+++ b/djblets/gravatars/__init__.py
@@ -60,8 +60,6 @@ def get_gravatar_url_for_email(request, email, size=None):
 
 
 def get_gravatar_url(request, user, size=None):
-    from django.conf import settings
-
     if user.is_anonymous() or not user.email:
         return ""
 
diff --git a/djblets/siteconfig/context_processors.py b/djblets/siteconfig/context_processors.py
index eb6ed70605062f9926fac9411d6ceb4c5566e5b3..19955091207ec903ccf09f191c24348455ece58b 100644
--- a/djblets/siteconfig/context_processors.py
+++ b/djblets/siteconfig/context_processors.py
@@ -25,6 +25,8 @@
 
 from __future__ import unicode_literals
 
+from django.conf import settings
+
 from djblets.siteconfig.models import SiteConfiguration
 
 
@@ -36,3 +38,7 @@ def siteconfig(request):
         return {'siteconfig': SiteConfiguration.objects.get_current()}
     except:
         return {'siteconfig': None}
+
+
+def settings_vars(request):
+    return {'settings': settings}
diff --git a/djblets/siteconfig/django_settings.py b/djblets/siteconfig/django_settings.py
index ff209f649ed5bb64c9ab58f33ada3cca4a9c3f18..e2aed1cea3a82a3df861cd5d0b1ae5c556ac2da9 100644
--- a/djblets/siteconfig/django_settings.py
+++ b/djblets/siteconfig/django_settings.py
@@ -30,7 +30,7 @@ from django.contrib.staticfiles.storage import staticfiles_storage
 from django.core.cache import DEFAULT_CACHE_ALIAS
 from django.utils import six, timezone
 
-from djblets.util.cache import normalize_cache_backend
+from djblets.cache.backend_compat import normalize_cache_backend
 
 
 def _set_cache_backend(settings, key, value):
diff --git a/djblets/siteconfig/models.py b/djblets/siteconfig/models.py
index e1618e93ce35e29eadb9db50691a47c10a45918f..9c69bba47c1d8033fb2c8d993696606bfa70e0d0 100644
--- a/djblets/siteconfig/models.py
+++ b/djblets/siteconfig/models.py
@@ -30,8 +30,8 @@ from django.core.cache import cache
 from django.db import models
 from django.utils.encoding import python_2_unicode_compatible
 
+from djblets.db.fields import JSONField
 from djblets.siteconfig.managers import SiteConfigurationManager
-from djblets.util.fields import JSONField
 
 
 _DEFAULTS = {}
diff --git a/djblets/siteconfig/tests.py b/djblets/siteconfig/tests.py
index a1c216a5d399756225b9cf847489df949ed5094b..fa7546a7baa5d145f5490e380e52688598dc3773 100644
--- a/djblets/siteconfig/tests.py
+++ b/djblets/siteconfig/tests.py
@@ -32,7 +32,7 @@ from djblets.siteconfig.django_settings import (apply_django_settings,
                                                 cache_settings_map,
                                                 mail_settings_map)
 from djblets.siteconfig.models import SiteConfiguration
-from djblets.util.testing import TestCase
+from djblets.testing.testcases import TestCase
 
 
 class SiteConfigTest(TestCase):
diff --git a/djblets/testing/testcases.py b/djblets/testing/testcases.py
index f589d59ede8200dd06a09f494dc1cc7f1424572b..33aa4346d06d7026eaec21d1883421daa6f7e3cb 100644
--- a/djblets/testing/testcases.py
+++ b/djblets/testing/testcases.py
@@ -29,12 +29,10 @@ from __future__ import print_function, unicode_literals
 import socket
 import threading
 
-from django.conf import settings
 from django.core.handlers.wsgi import WSGIHandler
 from django.core.servers import basehttp
 from django.template import Node
 from django.test import testcases
-from nose import SkipTest
 
 
 class StubNodeList(Node):
diff --git a/djblets/urls/context_processors.py b/djblets/urls/context_processors.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b9e2a72a531293270fd206c48a423149b1d1cff
--- /dev/null
+++ b/djblets/urls/context_processors.py
@@ -0,0 +1,12 @@
+from __future__ import unicode_literals
+
+from django.conf import settings
+
+
+def site_root(request):
+    """
+    Exposes a SITE_ROOT variable in templates. This assumes that the
+    project has been configured with a SITE_ROOT settings variable and
+    proper support for basing the installation in a subdirectory.
+    """
+    return {'SITE_ROOT': settings.SITE_ROOT}
diff --git a/djblets/urls/decorators.py b/djblets/urls/decorators.py
new file mode 100644
index 0000000000000000000000000000000000000000..51f08c971acebc204c86736832fae79fc51159dd
--- /dev/null
+++ b/djblets/urls/decorators.py
@@ -0,0 +1,20 @@
+from django.conf import settings
+
+from djblets.util.decorators import simple_decorator
+
+
+@simple_decorator
+def add_root_url(url_func):
+    """Decorates a function that returns a URL in order to add the SITE_ROOT."""
+    def _add_root(*args, **kwargs):
+        url = url_func(*args, **kwargs)
+
+        if url[0] != '/':
+            raise ValueError('Returned URL is not absolute')
+
+        if hasattr(settings, 'SITE_ROOT'):
+            return '%s%s' % (settings.SITE_ROOT, url[1:])
+        else:
+            return url
+
+    return _add_root
diff --git a/djblets/urls/patterns.py b/djblets/urls/patterns.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f57b85c24cec47d8afdfffde6ef1642ea046455
--- /dev/null
+++ b/djblets/urls/patterns.py
@@ -0,0 +1,25 @@
+from __future__ import unicode_literals
+
+from django.conf.urls import url
+from django.core.urlresolvers import RegexURLPattern
+from django.views.decorators.cache import never_cache
+
+
+def never_cache_patterns(prefix, *args):
+    """
+    Prevents any included URLs from being cached by the browser.
+
+    It's sometimes desirable not to allow browser caching for a set of URLs.
+    This can be used just like patterns().
+    """
+    pattern_list = []
+    for t in args:
+        if isinstance(t, (list, tuple)):
+            t = url(prefix=prefix, *t)
+        elif isinstance(t, RegexURLPattern):
+            t.add_prefix(prefix)
+
+        t._callback = never_cache(t.callback)
+        pattern_list.append(t)
+
+    return pattern_list
diff --git a/djblets/urls/resolvers.py b/djblets/urls/resolvers.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d73f58dbf56a50adf852fabccb6afa18dd4437b
--- /dev/null
+++ b/djblets/urls/resolvers.py
@@ -0,0 +1,118 @@
+from __future__ import unicode_literals
+
+from django.core.urlresolvers import (RegexURLResolver, clear_url_caches,
+                                      get_resolver)
+
+
+class DynamicURLResolver(RegexURLResolver):
+    """A URL resolver that allows for dynamically altering URL patterns.
+
+    A standard RegexURLResolver expects that a list of URL patterns will
+    be set once and never again change. In most applications, this is a
+    good assumption. However, some that are more specialized may need
+    to be able to swap in URL patterns dynamically. For example, those
+    that can plug in third-party extensions.
+
+    DynamicURLResolver makes it easy to add and remove URL patterns. Any
+    time the list of URL patterns changes, they'll be immediately available
+    for all URL resolution and reversing.
+
+    The usage is very simple::
+
+        dynamic_patterns = DynamicURLResolver()
+        urlpatterns = patterns('', dynamic_patterns)
+
+        dynamic_patterns.add_patterns([
+            url(...),
+            url(...),
+        ])
+
+    DynamicURLResolver will handle managing all the lookup caches to ensure
+    that there won't be any stale entries affecting any dynamic URL patterns.
+    """
+    def __init__(self, regex=r'', app_name=None, namespace=None):
+        super(DynamicURLResolver, self).__init__(regex=regex,
+                                                 urlconf_name=[],
+                                                 app_name=app_name,
+                                                 namespace=namespace)
+        self._resolver_chain = None
+
+    @property
+    def url_patterns(self):
+        """Returns the current list of URL patterns.
+
+        This is a simplified version of RegexURLResolver.url_patterns that
+        simply returns the preset list of patterns. Unlike the original
+        function, we don't care if the list is empty.
+        """
+        # Internally, urlconf_module represents whatever we're accessing
+        # for the list of URLs. It can be a list, or it can be something
+        # with a 'urlpatterns' property (intended for a urls.py). However,
+        # we force this to be a list in the constructor (as urlconf_name,
+        # which gets stored as urlconf_module), so we know we can just
+        # return it as-is.
+        return self.urlconf_module
+
+    def add_patterns(self, patterns):
+        """Adds a list of URL patterns.
+
+        The patterns will be made immediately available for use for any
+        lookups or reversing.
+        """
+        self.url_patterns.extend(patterns)
+        self._clear_cache()
+
+    def remove_patterns(self, patterns):
+        """Removes a list of URL patterns.
+
+        These patterns will no longer be able to be looked up or reversed.
+        """
+        for pattern in patterns:
+            self.url_patterns.remove(pattern)
+
+        self._clear_cache()
+
+    def _clear_cache(self):
+        """Clears the internal resolver caches.
+
+        This will clear all caches for this resolver and every parent
+        of this resolver, in order to ensure that the next lookup or reverse
+        will result in a lookup in this resolver. By default, every
+        RegexURLResolver in Django will cache all results from its children.
+
+        We take special care to only clear the caches of the resolvers in
+        our parent chain.
+        """
+        for resolver in self.resolver_chain:
+            resolver._reverse_dict.clear()
+            resolver._namespace_dict.clear()
+            resolver._app_dict.clear()
+
+        clear_url_caches()
+
+    @property
+    def resolver_chain(self):
+        """Returns every RegexURLResolver between here and the root.
+
+        The list of resolvers is cached in order to prevent having to locate
+        the resolvers more than once.
+        """
+        if self._resolver_chain is None:
+            self._resolver_chain = \
+                self._find_resolver_chain(get_resolver(None))
+
+        return self._resolver_chain
+
+    def _find_resolver_chain(self, resolver):
+        if resolver == self:
+            return [resolver]
+
+        for url_pattern in resolver.url_patterns:
+            if isinstance(url_pattern, RegexURLResolver):
+                resolvers = self._find_resolver_chain(url_pattern)
+
+                if resolvers:
+                    resolvers.append(resolver)
+                    return resolvers
+
+        return []
diff --git a/djblets/urls/root.py b/djblets/urls/root.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4a11005f7a224d51847eda27a978f3bf72a28c7
--- /dev/null
+++ b/djblets/urls/root.py
@@ -0,0 +1,51 @@
+#
+# root.py -- URL patterns for rooted sites.
+#
+# Copyright (c) 2007-2010  Christian Hammond
+# Copyright (c) 2010-2013  Beanbag, Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# 'Software'), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.conf.urls import patterns, include, handler404, handler500
+from django.core.exceptions import ImproperlyConfigured
+
+
+# Ensures that we can run nose on this without needing to set SITE_ROOT.
+# Also serves to let people know if they set one variable without the other.
+if hasattr(settings, 'SITE_ROOT'):
+    if not hasattr(settings, 'SITE_ROOT_URLCONF'):
+        raise ImproperlyConfigured('SITE_ROOT_URLCONF must be set when '
+                                   'using SITE_ROOT')
+
+    urlpatterns = patterns('',
+        (r'^%s' % settings.SITE_ROOT[1:], include(settings.SITE_ROOT_URLCONF)),
+    )
+else:
+    urlpatterns = None
+
+
+__all__ = [
+    'handler404',
+    'handler500',
+    'urlpatterns',
+]
diff --git a/djblets/util/cache.py b/djblets/util/cache.py
index 94c443c319ae4a891d92b1e7a1bc1ff7889cb4a6..722319d4c814bfe034113183c37677a7f907e5dd 100644
--- a/djblets/util/cache.py
+++ b/djblets/util/cache.py
@@ -1,57 +1,11 @@
 from __future__ import unicode_literals
+import warnings
 
-import logging
+from djblets.cache.backend_compat import normalize_cache_backend
 
-from django.core.cache import (DEFAULT_CACHE_ALIAS, parse_backend_uri,
-                               InvalidCacheBackendError)
 
+warnings.warn('djblets.util.cache is deprecated. Use '
+              'djblets.cache.backend_compat.', DeprecationWarning)
 
-BACKEND_CLASSES = {
-    'db': 'db.DatabaseCache',
-    'dummy': 'dummy.DummyCache',
-    'file': 'filebased.FileBasedCache',
-    'locmem': 'locmem.LocMemCache',
-    'memcached': 'memcached.CacheClass',
-}
 
-
-def normalize_cache_backend(cache_backend):
-    """Returns a new-style CACHES dictionary from any given cache_backend.
-
-    Django has supported two formats for a cache backend. The old-style
-    CACHE_BACKEND string, and the new-style CACHES dictionary.
-
-    This function will accept either as input and return a cahe backend in the
-    form of a CACHES dictionary as a result. The result won't be a full-on
-    CACHES, with named cache entries inside. Rather, it will be a cache entry.
-
-    If a CACHES dictionary is passed, the "default" cache will be the result.
-    """
-    if not cache_backend:
-        return {}
-
-    if isinstance(cache_backend, dict):
-        if DEFAULT_CACHE_ALIAS in cache_backend:
-            return cache_backend[DEFAULT_CACHE_ALIAS]
-
-        return {}
-
-    try:
-        engine, host, params = parse_backend_uri(cache_backend)
-    except InvalidCacheBackendError as e:
-        logging.error('Invalid cache backend (%s) found while loading '
-                      'siteconfig: %s' % (cache_backend, e))
-        return {}
-
-    if engine in BACKEND_CLASSES:
-        engine = 'django.core.cache.backends.%s' % BACKEND_CLASSES[engine]
-    else:
-        engine = '%s.CacheClass' % engine
-
-    defaults = {
-        'BACKEND': engine,
-        'LOCATION': host,
-    }
-    defaults.update(params)
-
-    return defaults
+__all__ = ['normalize_cache_backend']
diff --git a/djblets/util/context_processors.py b/djblets/util/context_processors.py
index 9c962bc50a3808d46b141c20ab4830e59e8b199a..a2937e6269c6e2343027432d2e2810474dcca062 100644
--- a/djblets/util/context_processors.py
+++ b/djblets/util/context_processors.py
@@ -25,45 +25,21 @@
 #
 
 from __future__ import unicode_literals
+import warnings
 
-from django.conf import settings
+from djblets.siteconfig.context_processors import settings_vars as settingsVars
+from djblets.urls.context_processors import site_root as siteRoot
+from djblets.cache.context_processors import (ajax_serial as ajaxSerial,
+                                              media_serial as mediaSerial)
 
 
-def settingsVars(request):
-    return {'settings': settings}
+warnings.warn('djblets.util.context_processors is deprecated',
+              DeprecationWarning)
 
 
-def siteRoot(request):
-    """
-    Exposes a SITE_ROOT variable in templates. This assumes that the
-    project has been configured with a SITE_ROOT settings variable and
-    proper support for basing the installation in a subdirectory.
-    """
-    return {'SITE_ROOT': settings.SITE_ROOT}
-
-
-def mediaSerial(request):
-    """
-    Exposes a media serial number that can be appended to a media filename
-    in order to make a URL that can be cached forever without fear of change.
-    The next time the file is updated and the server is restarted, a new
-    path will be accessed and cached.
-
-    This returns the value of settings.MEDIA_SERIAL, which must either be
-    set manually or ideally should be set to the value of
-    djblets.util.misc.generate_media_serial().
-    """
-    return {'MEDIA_SERIAL': getattr(settings, "MEDIA_SERIAL", "")}
-
-
-def ajaxSerial(request):
-    """
-    Exposes a serial number that can be appended to filenames involving
-    dynamic loads of URLs in order to make a URL that can be cached forever
-    without fear of change.
-
-    This returns the value of settings.AJAX_SERIAL, which must either be
-    set manually or ideally should be set to the value of
-    djblets.util.misc.generate_ajax_serial().
-    """
-    return {'AJAX_SERIAL': getattr(settings, "AJAX_SERIAL", "")}
+__all__ = [
+    'ajaxSerial',
+    'mediaSerial',
+    'settingsVars',
+    'siteRoot',
+]
diff --git a/djblets/util/db.py b/djblets/util/db.py
index ff78b1623cf1078de40a80c45eab6cebf6c5af75..78050cf0fe5b85c31155199de1059ae258023833 100644
--- a/djblets/util/db.py
+++ b/djblets/util/db.py
@@ -1,52 +1,10 @@
-#
-# db.py -- Database utilities.
-#
-# Copyright (c) 2007-2009  David Trowbridge
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
 from __future__ import unicode_literals
+import warnings
 
-from django.db import models, IntegrityError
+from djblets.db.managers import ConcurrencyManager
 
 
-class ConcurrencyManager(models.Manager):
-    """
-    A class designed to work around database concurrency issues.
-    """
-    def get_or_create(self, **kwargs):
-        """
-        A wrapper around get_or_create that makes a final attempt to get
-        the object if the creation fails.
+warnings.warn('djblets.util.db is deprecated', DeprecationWarning)
 
-        This helps with race conditions in the database where, between the
-        original get() and the create(), another process created the object,
-        causing us to fail. We'll then execute a get().
 
-        This is still prone to race conditions, but they're even more rare.
-        A delete() would have to happen before the unexpected create() but
-        before the get().
-        """
-        try:
-            return super(ConcurrencyManager, self).get_or_create(**kwargs)
-        except IntegrityError:
-            kwargs.pop('defaults', None)
-            return self.get(**kwargs)
+__all__ = ['ConcurrencyManager']
diff --git a/djblets/util/dbevolution.py b/djblets/util/dbevolution.py
index d7fb5d0bc870180b4808b77abb6f24fd19a31cd5..64e91db2c2167aac39de67f7537948310a18f321 100644
--- a/djblets/util/dbevolution.py
+++ b/djblets/util/dbevolution.py
@@ -1,60 +1,11 @@
-#
-# dbevolution.py -- Helpers for database evolutions
-#
-# Copyright (c) 2008-2009  Christian Hammond
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
 from __future__ import unicode_literals
+import warnings
 
-from django_evolution.mutations import BaseMutation
-
-
-class FakeChangeFieldType(BaseMutation):
-    """
-    Changes the type of the field to a similar type.
-    This is intended only when the new type is really a version of the
-    old type, such as a subclass of that Field object. The two fields
-    should be compatible or there could be migration issues.
-    """
-    def __init__(self, model_name, field_name, new_type):
-        self.model_name = model_name
-        self.field_name = field_name
-        self.new_type = new_type
+from djblets.db.evolution import FakeChangeFieldType
 
-    def __repr__(self):
-        return "FakeChangeFieldType('%s', '%s', '%s')" % \
-            (self.model_name, self.field_name, self.new_type)
 
-    def simulate(self, app_label, proj_sig):
-        app_sig = proj_sig[app_label]
-        model_sig = app_sig[self.model_name]
-        field_dict = model_sig['fields']
-        field_sig = field_dict[self.field_name]
+warnings.warn('djblets.util.dbevolution is deprecated. Use '
+              'djblets.db.evolution instead.', DeprecationWarning)
 
-        field_sig['field_type'] = self.new_type
 
-    def mutate(self, app_label, proj_sig):
-        # We can just call simulate, since it does the same thing.
-        # We're not actually generating SQL, but rather tricking
-        # Django Evolution.
-        self.simulate(app_label, proj_sig)
-        return ""
+__all__ = ['FakeChangeFieldType']
diff --git a/djblets/util/decorators.py b/djblets/util/decorators.py
index efcb5cd803dde5dc6726369ff676f6f624b525c6..a1efa696cf87b000f77ecac74d4ccfdeefb5bb1a 100644
--- a/djblets/util/decorators.py
+++ b/djblets/util/decorators.py
@@ -26,8 +26,8 @@
 #
 
 from __future__ import unicode_literals
-
 from inspect import getargspec
+import warnings
 
 from django import template
 from django.conf import settings
@@ -229,4 +229,7 @@ def root_url(url_func):
         else:
             return url
 
+    warnings.warn('djblets.util.decorators.root_url is deprecated.',
+                  DeprecationWarning)
+
     return _add_root
diff --git a/djblets/util/fields.py b/djblets/util/fields.py
index 2840dac50d4562e5c7e00abc4d49133d65f90248..46e87e7456f980ccd5b6b5818cc671b3f5d1f47a 100644
--- a/djblets/util/fields.py
+++ b/djblets/util/fields.py
@@ -1,373 +1,22 @@
-#
-# fields.py -- Model fields.
-#
-# Copyright (c) 2007-2008  Christian Hammond
-# Copyright (c) 2007-2008  David Trowbridge
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
 from __future__ import unicode_literals
+import warnings
 
-import base64
-import json
-import logging
-from datetime import datetime
-from ast import literal_eval
-
-from django.conf import settings
-from django.core.exceptions import ValidationError
-from django.core.serializers.json import DjangoJSONEncoder
-from django.db import models
-from django.db.models import F
-from django.utils import six
-from django.utils.encoding import smart_unicode
-
-from djblets.util.dates import get_tz_aware_utcnow
-
-
-class Base64DecodedValue(str):
-    """
-    A subclass of string that can be identified by Base64Field, in order
-    to prevent double-encoding or double-decoding.
-    """
-    pass
-
-
-class Base64FieldCreator(object):
-    def __init__(self, field):
-        self.field = field
-
-    def __set__(self, obj, value):
-        pk_val = obj._get_pk_val(obj.__class__._meta)
-        pk_set = pk_val is not None and smart_unicode(pk_val) != ''
-
-        if (isinstance(value, Base64DecodedValue) or not pk_set):
-            obj.__dict__[self.field.name] = base64.encodestring(value)
-        else:
-            obj.__dict__[self.field.name] = value
-
-        setattr(obj, "%s_initted" % self.field.name, True)
-
-    def __get__(self, obj, type=None):
-        if obj is None:
-            raise AttributeError('Can only be accessed via an instance.')
-
-        value = obj.__dict__[self.field.name]
-
-        if value is None:
-            return None
-        else:
-            return Base64DecodedValue(base64.decodestring(value))
-
-
-class Base64Field(models.TextField):
-    """
-    A subclass of TextField that encodes its data as base64 in the database.
-    This is useful if you're dealing with unknown encodings and must guarantee
-    that no modifications to the text occurs and that you can read/write
-    the data in any database with any encoding.
-    """
-    serialize_to_string = True
-
-    def contribute_to_class(self, cls, name):
-        super(Base64Field, self).contribute_to_class(cls, name)
-        setattr(cls, self.name, Base64FieldCreator(self))
-
-    def get_db_prep_value(self, value, connection=None, prepared=False):
-        if isinstance(value, Base64DecodedValue):
-            value = base64.encodestring(value)
-
-        return value
-
-    def save_form_data(self, instance, data):
-        setattr(instance, self.name, Base64DecodedValue(data))
-
-    def to_python(self, value):
-        if isinstance(value, Base64DecodedValue):
-            return value
-        else:
-            return Base64DecodedValue(base64.decodestring(value))
-
-    def value_to_string(self, obj):
-        value = self._get_val_from_obj(obj)
-
-        if isinstance(value, Base64DecodedValue):
-            return base64.encodestring(value)
-        else:
-            return value
-
-
-class ModificationTimestampField(models.DateTimeField):
-    """
-    A subclass of DateTimeField that only auto-updates the timestamp when
-    updating an existing object or when the value of the field is None. This
-    specialized field is equivalent to DateTimeField's auto_now=True, except
-    it allows for custom timestamp values (needed for
-    serialization/deserialization).
-    """
-    def __init__(self, verbose_name=None, name=None, **kwargs):
-        kwargs.update({
-            'editable': False,
-            'blank': True,
-        })
-        models.DateTimeField.__init__(self, verbose_name, name, **kwargs)
-
-    def pre_save(self, model, add):
-        if not add or getattr(model, self.attname) is None:
-
-            if settings.USE_TZ:
-                value = get_tz_aware_utcnow()
-            else:
-                value = datetime.now()
-
-            setattr(model, self.attname, value)
-            return value
-
-        return super(ModificationTimestampField, self).pre_save(model, add)
-
-    def get_internal_type(self):
-        return "DateTimeField"
-
-
-def validate_json(value):
-    """Validates content going into a JSONField.
-
-    This will raise a ValidationError if the value is a string
-    (representing a serialized JSON payload, possibly from the admin UI)
-    and cannot be loaded properly.
-    """
-    if isinstance(value, six.string_types):
-        try:
-            json.loads(value)
-        except ValueError as e:
-            raise ValidationError(unicode(e), code='invalid')
-
-
-class JSONField(models.TextField):
-    """
-    A field for storing JSON-encoded data. The data is accessible as standard
-    Python data types and is transparently encoded/decoded to/from a JSON
-    string in the database.
-    """
-    serialize_to_string = True
-    default_validators = [validate_json]
-
-    def __init__(self, verbose_name=None, name=None,
-                 encoder=DjangoJSONEncoder(), **kwargs):
-        blank = kwargs.pop('blank', True)
-        models.TextField.__init__(self, verbose_name, name, blank=blank,
-                                  **kwargs)
-        self.encoder = encoder
-
-    def contribute_to_class(self, cls, name):
-        def get_json(model_instance):
-            return self.dumps(getattr(model_instance, self.attname, None))
-
-        def set_json(model_instance, json):
-            setattr(model_instance, self.attname, self.loads(json))
-
-        super(JSONField, self).contribute_to_class(cls, name)
-
-        setattr(cls, "get_%s_json" % self.name, get_json)
-        setattr(cls, "set_%s_json" % self.name, set_json)
-
-        models.signals.post_init.connect(self.post_init, sender=cls)
-
-    def pre_save(self, model_instance, add):
-        return self.dumps(getattr(model_instance, self.attname, None))
-
-    def post_init(self, instance=None, **kwargs):
-        value = self.value_from_object(instance)
-
-        if value:
-            value = self.loads(value)
-        else:
-            value = {}
-
-        setattr(instance, self.attname, value)
-
-    def get_db_prep_save(self, value, *args, **kwargs):
-        if not isinstance(value, six.string_types):
-            value = self.dumps(value)
-
-        return super(JSONField, self).get_db_prep_save(value, *args, **kwargs)
-
-    def value_to_string(self, obj):
-        return self.dumps(self.value_from_object(obj))
-
-    def dumps(self, data):
-        if isinstance(data, six.string_types):
-            return data
-        else:
-            return self.encoder.encode(data)
-
-    def loads(self, val):
-        try:
-            val = json.loads(val, encoding=settings.DEFAULT_CHARSET)
-
-            # XXX We need to investigate why this is happening once we have
-            #     a solid repro case.
-            if isinstance(val, six.string_types):
-                logging.warning("JSONField decode error. Expected dictionary, "
-                                "got string for input '%s'" % val)
-                # For whatever reason, we may have gotten back
-                val = json.loads(val, encoding=settings.DEFAULT_CHARSET)
-        except ValueError:
-            # There's probably embedded unicode markers (like u'foo') in the
-            # string. We have to eval it.
-            try:
-                val = literal_eval(val)
-            except Exception as e:
-                logging.error('Failed to eval JSONField data "%r": %s'
-                              % (val, e))
-                val = {}
-
-            if isinstance(val, six.string_types):
-                logging.warning('JSONField decode error after literal_eval: '
-                                'Expected dictionary, got string: %r' % val)
-                val = {}
-
-        return val
-
-
-class CounterField(models.IntegerField):
-    """A field that provides atomic counter updating and smart initialization.
-
-    The CounterField makes it easy to atomically update an integer,
-    incrementing or decrementing it, without raise conditions or conflicts.
-    It can update a single instance at a time, or a batch of objects at once.
-
-    CounterField is useful for storing counts of objects, reducing the number
-    of queries performed. This requires that the calling code properly
-    increments or decrements at all the right times, of course.
-
-    This takes an optional ``initializer`` parameter that, if provided, can
-    be used to auto-populate the field the first time the model instance is
-    loaded, perhaps based on querying a number of related objects. The value
-    passed to ``initializer`` must be a function taking the model instance
-    as a parameter, and must return an integer.
-
-    The model instance will gain four new functions:
-
-        * ``increment_{field_name}`` - Atomically increment by one.
-        * ``decrement_{field_name}`` - Atomically decrement by one.
-        * ``reload_{field_name}`` - Reload the value in this instance from the
-                                    database.
-        * ``reinit_{field_name}`` - Re-initializes the stored field using the
-                                    initializer function.
-
-    The field on the class (not the instance) provides two functions for
-    batch-updating models:
-
-        * ``increment`` - Takes a queryset and increments this field for
-                          each object.
-        * ``decrement`` - Takes a queryset and decrements this field for
-                          each object.
-    """
-
-    def __init__(self, verbose_name=None, name=None,
-                 initializer=None, default=None, **kwargs):
-        kwargs.update({
-            'blank': True,
-            'null': True,
-        })
-
-        super(CounterField, self).__init__(verbose_name, name, default=default,
-                                           **kwargs)
-
-        self._initializer = initializer
-        self._locks = {}
-
-    def increment(self, queryset, increment_by=1):
-        """Increments this field on every object in the provided queryset."""
-        queryset.update(**{self.attname: F(self.attname) + increment_by})
-
-    def decrement(self, queryset, decrement_by=1):
-        """Decrements this field on every object in the provided queryset."""
-        queryset.update(**{self.attname: F(self.attname) - decrement_by})
-
-    def contribute_to_class(self, cls, name):
-        def _increment(model_instance, reload_object=True, increment_by=1):
-            """Increments this field by one."""
-            self.increment(cls.objects.filter(pk=model_instance.pk),
-                           increment_by)
-
-            if reload_object:
-                _reload(model_instance)
-
-        def _decrement(model_instance, reload_object=True, decrement_by=1):
-            """Decrements this field by one."""
-            self.decrement(cls.objects.filter(pk=model_instance.pk),
-                           decrement_by)
-
-            if reload_object:
-                _reload(model_instance)
-
-        def _reload(model_instance):
-            """Reloads the value in this instance from the database."""
-            q = cls.objects.filter(pk=model_instance.pk)
-            setattr(model_instance, self.attname,
-                    q.values(self.attname)[0][self.attname])
-
-        def _reinit(model_instance):
-            """Re-initializes the value in the database from the initializer."""
-            if not (model_instance.pk or self._initializer or
-                    six.callable(self._initializer)):
-                # We don't want to end up defaulting this to 0 if creating a
-                # new instance unless an initializer is provided. Instead,
-                # we'll want to handle this the next time the object is
-                # accessed.
-                return
-
-            if self._initializer and six.callable(self._initializer):
-                self._locks[model_instance] = 1
-                value = self._initializer(model_instance)
-                del self._locks[model_instance]
-            else:
-                value = 0
-
-            setattr(model_instance, self.attname, value)
-
-            if model_instance.pk:
-                model_instance.save()
-
-        super(CounterField, self).contribute_to_class(cls, name)
-
-        setattr(cls, 'increment_%s' % self.name, _increment)
-        setattr(cls, 'decrement_%s' % self.name, _decrement)
-        setattr(cls, 'reload_%s' % self.name, _reload)
-        setattr(cls, 'reinit_%s' % self.name, _reinit)
-        setattr(cls, self.attname, self)
+from djblets.db.fields import (Base64DecodedValue, Base64Field,
+                               Base64FieldCreator, CounterField, JSONField,
+                               ModificationTimestampField)
+from djblets.db.validators import validate_json
 
-        models.signals.post_init.connect(self._post_init, sender=cls)
 
-    def _post_init(self, instance=None, **kwargs):
-        if not instance or instance in self._locks:
-            # Prevent the possibility of recursive lookups where this
-            # same CounterField on this same instance tries to initialize
-            # more than once. In this case, this will have the updated
-            # value shortly.
-            return
+warnings.warn('djblets.util.fields is deprecated. Use '
+              'djblets.db.fields instead.', DeprecationWarning)
 
-        value = self.value_from_object(instance)
 
-        if value is None:
-            reinit = getattr(instance, 'reinit_%s' % self.name)
-            reinit()
+__all__ = [
+    'Base64DecodedValue',
+    'Base64Field',
+    'Base64FieldCreator',
+    'CounterField',
+    'JSONField',
+    'ModificationTimestampField',
+    'validate_json',
+]
diff --git a/djblets/util/forms.py b/djblets/util/forms.py
index f186fe1931570cce923093ad9e037c2b63501711..8f5b2fb7ff25c5bdf7f3ad98c1c13f680337e766 100644
--- a/djblets/util/forms.py
+++ b/djblets/util/forms.py
@@ -1,13 +1,14 @@
 from __future__ import unicode_literals
+import warnings
 
-import pytz
-from django import forms
+from djblets.forms.fields import TIMEZONE_CHOICES, TimeZoneField
 
 
-TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones))
+warnings.warn('djblets.util.forms is deprecated. Use '
+              'djblets.forms.fields instead.', DeprecationWarning)
 
 
-class TimeZoneField(forms.ChoiceField):
-    """A form field that only allows pytz common timezones as the choices."""
-    def __init__(self, choices=TIMEZONE_CHOICES, *args, **kwargs):
-        super(TimeZoneField, self).__init__(choices, *args, **kwargs)
+__all__ = [
+    'TIMEZONE_CHOICES',
+    'TimeZoneField',
+]
diff --git a/djblets/util/misc.py b/djblets/util/misc.py
index e0853ebc902245e3b73c42a8fb732fd34b4010d2..e0583ffc3ac76fab4fc7b8a350477c70a6e228d2 100644
--- a/djblets/util/misc.py
+++ b/djblets/util/misc.py
@@ -25,315 +25,27 @@
 #
 
 from __future__ import unicode_literals
-
-import logging
-import os
-import zlib
-from hashlib import md5
-
-from django.core.cache import cache
-from django.core.urlresolvers import RegexURLPattern
-from django.conf import settings
-from django.conf.urls import url
-from django.contrib.sites.models import Site
-from django.db.models.manager import Manager
-from django.utils import importlib
-from django.views.decorators.cache import never_cache
-
-from djblets.util.compat.six.moves import cStringIO as StringIO
-from djblets.util.compat.six.moves import cPickle as pickle
-
-
-DEFAULT_EXPIRATION_TIME = 60 * 60 * 24 * 30 # 1 month
-CACHE_CHUNK_SIZE = 2**20 - 1024 # almost 1M (memcached's slab limit)
-
-# memcached key size constraint (typically 250, but leave a few bytes for the
-# large data handling)
-MAX_KEY_SIZE = 240
-
-
-class MissingChunkError(Exception):
-    pass
-
-
-def _cache_fetch_large_data(cache, key, compress_large_data):
-    chunk_count = cache.get(make_cache_key(key))
-    data = []
-
-    chunk_keys = [make_cache_key('%s-%d' % (key, i))
-                  for i in range(int(chunk_count))]
-    chunks = cache.get_many(chunk_keys)
-    for chunk_key in chunk_keys:
-        try:
-            data.append(chunks[chunk_key][0])
-        except KeyError:
-            logging.debug('Cache miss for key %s.' % chunk_key)
-            raise MissingChunkError
-
-    data = b''.join(data)
-
-    if compress_large_data:
-        data = zlib.decompress(data)
-
-    try:
-        unpickler = pickle.Unpickler(StringIO(data))
-        data = unpickler.load()
-    except Exception as e:
-        logging.warning('Unpickle error for cache key "%s": %s.' % (key, e))
-        raise e
-
-    return data
-
-
-def _cache_store_large_data(cache, key, data, expiration, compress_large_data):
-    # We store large data in the cache broken into chunks that are 1M in size.
-    # To do this easily, we first pickle the data and compress it with zlib.
-    # This gives us a string which can be chunked easily. These are then stored
-    # individually in the cache as single-element lists (so the cache backend
-    # doesn't try to convert binary data to utf8). The number of chunks needed
-    # is stored in the cache under the unadorned key
-    file = StringIO()
-    pickler = pickle.Pickler(file)
-    pickler.dump(data)
-    data = file.getvalue()
-
-    if compress_large_data:
-        data = zlib.compress(data)
-
-    i = 0
-    while len(data) > CACHE_CHUNK_SIZE:
-        chunk = data[0:CACHE_CHUNK_SIZE]
-        data = data[CACHE_CHUNK_SIZE:]
-        cache.set(make_cache_key('%s-%d' % (key, i)), [chunk], expiration)
-        i += 1
-    cache.set(make_cache_key('%s-%d' % (key, i)), [data], expiration)
-
-    cache.set(make_cache_key(key), '%d' % (i + 1), expiration)
-
-
-def cache_memoize(key, lookup_callable,
-                  expiration=getattr(settings, 'CACHE_EXPIRATION_TIME',
-                                     DEFAULT_EXPIRATION_TIME),
-                  force_overwrite=False,
-                  large_data=False,
-                  compress_large_data=True):
-    """Memoize the results of a callable inside the configured cache.
-
-    Keyword arguments:
-    expiration          -- The expiration time for the key.
-    force_overwrite     -- If True, the value will always be computed and stored
-                           regardless of whether it exists in the cache already.
-    large_data          -- If True, the resulting data will be pickled, gzipped,
-                           and (potentially) split up into megabyte-sized chunks.
-                           This is useful for very large, computationally
-                           intensive hunks of data which we don't want to store
-                           in a database due to the way things are accessed.
-    compress_large_data -- Compresses the data with zlib compression when
-                           large_data is True.
-    """
-    if large_data:
-        if not force_overwrite and make_cache_key(key) in cache:
-            try:
-                data = _cache_fetch_large_data(cache, key, compress_large_data)
-                return data
-            except Exception as e:
-                logging.warning('Failed to fetch large data from cache for '
-                                'key %s: %s.' % (key, e))
-        else:
-            logging.debug('Cache miss for key %s.' % key)
-
-        data = lookup_callable()
-        _cache_store_large_data(cache, key, data, expiration,
-                                compress_large_data)
-        return data
-
-    else:
-        key = make_cache_key(key)
-        if not force_overwrite and key in cache:
-            return cache.get(key)
-        data = lookup_callable()
-
-        # Most people will be using memcached, and memcached has a limit of 1MB.
-        # Data this big should be broken up somehow, so let's warn about this.
-        # Users should hopefully be using large_data=True in this case.
-        # XXX - since 'data' may be a sequence that's not a string/unicode,
-        #       this can fail. len(data) might be something like '6' but the
-        #       data could exceed a megabyte. The best way to catch this would
-        #       be an exception, but while python-memcached defines an exception
-        #       type for this, it never uses it, choosing instead to fail
-        #       silently. WTF.
-        if len(data) >= CACHE_CHUNK_SIZE:
-            logging.warning('Cache data for key "%s" (length %s) may be too '
-                            'big for the cache.' % (key, len(data)))
-
-        try:
-            cache.set(key, data, expiration)
-        except:
-            pass
-        return data
-
-
-def make_cache_key(key):
-    """Creates a cache key guaranteed to avoid conflicts and size limits.
-
-    The cache key will be prefixed by the site's domain, and will be
-    changed to an MD5SUM if it's larger than the maximum key size.
-    """
-    try:
-        site = Site.objects.get_current()
-
-        # The install has a Site app, so prefix the domain to the key.
-        # If a SITE_ROOT is defined, also include that, to allow for multiple
-        # instances on the same host.
-        site_root = getattr(settings, 'SITE_ROOT', None)
-
-        if site_root:
-            key = '%s:%s:%s' % (site.domain, site_root, key)
-        else:
-            key = '%s:%s' % (site.domain, key)
-    except:
-        # The install doesn't have a Site app, so use the key as-is.
-        pass
-
-    # Adhere to memcached key size limit
-    if len(key) > MAX_KEY_SIZE:
-        digest = md5(key.encode('utf-8')).hexdigest();
-
-        # Replace the excess part of the key with a digest of the key
-        key = key[:MAX_KEY_SIZE - len(digest)] + digest
-
-    # Make sure this is a non-unicode string, in order to prevent errors
-    # with some backends.
-    key = key.encode('utf-8')
-
-    return key
-
-
-def get_object_or_none(klass, *args, **kwargs):
-    if isinstance(klass, Manager):
-        manager = klass
-        klass = manager.model
-    else:
-        manager = klass._default_manager
-
-    try:
-        return manager.get(*args, **kwargs)
-    except klass.DoesNotExist:
-        return None
-
-
-def never_cache_patterns(prefix, *args):
-    """
-    Prevents any included URLs from being cached by the browser.
-
-    It's sometimes desirable not to allow browser caching for a set of URLs.
-    This can be used just like patterns().
-    """
-    pattern_list = []
-    for t in args:
-        if isinstance(t, (list, tuple)):
-            t = url(prefix=prefix, *t)
-        elif isinstance(t, RegexURLPattern):
-            t.add_prefix(prefix)
-
-        t._callback = never_cache(t.callback)
-        pattern_list.append(t)
-
-    return pattern_list
-
-
-
-def generate_media_serial():
-    """
-    Generates a media serial number that can be appended to a media filename
-    in order to make a URL that can be cached forever without fear of change.
-    The next time the file is updated and the server is restarted, a new
-    path will be accessed and cached.
-
-    This will crawl the media files (using directories in MEDIA_SERIAL_DIRS if
-    specified, or all of STATIC_ROOT otherwise), figuring out the latest
-    timestamp, and return that value.
-    """
-    MEDIA_SERIAL = getattr(settings, "MEDIA_SERIAL", 0)
-
-    if not MEDIA_SERIAL:
-        media_dirs = getattr(settings, "MEDIA_SERIAL_DIRS", ["."])
-
-        for media_dir in media_dirs:
-            media_path = os.path.join(settings.STATIC_ROOT, media_dir)
-
-            for root, dirs, files in os.walk(media_path):
-                for name in files:
-                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
-
-                    if mtime > MEDIA_SERIAL:
-                        MEDIA_SERIAL = mtime
-
-        setattr(settings, "MEDIA_SERIAL", MEDIA_SERIAL)
-
-
-def generate_ajax_serial():
-    """
-    Generates a serial number that can be appended to filenames involving
-    dynamic loads of URLs in order to make a URL that can be cached forever
-    without fear of change.
-
-    This will crawl the template files (using directories in TEMPLATE_DIRS),
-    figuring out the latest timestamp, and return that value.
-    """
-    AJAX_SERIAL = getattr(settings, "AJAX_SERIAL", 0)
-
-    if not AJAX_SERIAL:
-        template_dirs = getattr(settings, "TEMPLATE_DIRS", ["."])
-
-        for template_path in template_dirs:
-            for root, dirs, files in os.walk(template_path):
-                for name in files:
-                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
-
-                    if mtime > AJAX_SERIAL:
-                        AJAX_SERIAL = mtime
-
-        setattr(settings, "AJAX_SERIAL", AJAX_SERIAL)
-
-
-def generate_locale_serial(packages):
-    """Generate a locale serial for the given set of packages.
-
-    This will be equal to the most recent mtime of all the .mo files that
-    contribute to the localization of the given packages.
-    """
-    serial = 0
-
-    paths = []
-    for package in packages:
-        try:
-            p = importlib.import_module(package)
-            path = os.path.join(os.path.dirname(p.__file__), 'locale')
-            paths.append(path)
-        except Exception as e:
-            logging.error(
-                'Failed to import package %s to compute locale serial: %s'
-                % (package, e))
-
-    for locale_path in paths:
-        for root, dirs, files in os.walk(locale_path):
-            for name in files:
-                if name.endswith('.mo'):
-                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)
-                    if mtime > serial:
-                        serial = mtime
-
-    return serial
-
-
-def generate_cache_serials():
-    """
-    Wrapper around generate_media_serial and generate_ajax_serial to
-    generate all serial numbers in one go.
-
-    This should be called early in the startup, such as in the site's
-    main urls.py.
-    """
-    generate_media_serial()
-    generate_ajax_serial()
+import warnings
+
+from djblets.cache.backend import cache_memoize, make_cache_key
+from djblets.cache.serials import (generate_ajax_serial,
+                                   generate_cache_serials,
+                                   generate_locale_serial,
+                                   generate_media_serial)
+from djblets.db.query import get_object_or_none
+from djblets.urls.patterns import never_cache_patterns
+
+
+warnings.warn('djblets.util.misc is deprecated', DeprecationWarning)
+
+
+__all__ = [
+    'cache_memoize',
+    'generate_ajax_serial',
+    'generate_cache_serials',
+    'generate_locale_serial',
+    'generate_media_serial',
+    'get_object_or_none',
+    'make_cache_key',
+    'never_cache_patterns',
+]
diff --git a/djblets/util/rooturl.py b/djblets/util/rooturl.py
index e8704ee2be8b08daf5ba018bfac3d5ca94110cef..f07b47c154dcd333c404a92a8c793e8570676b1d 100644
--- a/djblets/util/rooturl.py
+++ b/djblets/util/rooturl.py
@@ -1,42 +1,11 @@
-#
-# rooturl.py -- URL patterns for rooted sites.
-#
-# Copyright (c) 2007-2009  Christian Hammond
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-#
-
 from __future__ import unicode_literals
+import warnings
+
+from djblets.urls.root import urlpatterns
 
-from django.conf import settings
-from django.conf.urls import patterns, include, handler404, handler500
-from django.core.exceptions import ImproperlyConfigured
 
+warnings.warn('djblets.util.rooturl is deprecated. Use '
+              'djblets.urls.root instead.', DeprecationWarning)
 
-# Ensures that we can run nose on this without needing to set SITE_ROOT.
-# Also serves to let people know if they set one variable without the other.
-if hasattr(settings, "SITE_ROOT"):
-    if not hasattr(settings, "SITE_ROOT_URLCONF"):
-        raise ImproperlyConfigured("SITE_ROOT_URLCONF must be set when "
-                                   "using SITE_ROOT")
 
-    urlpatterns = patterns('',
-        (r'^%s' % settings.SITE_ROOT[1:], include(settings.SITE_ROOT_URLCONF)),
-    )
+__all__ = ['urlpatterns']
diff --git a/djblets/util/testing.py b/djblets/util/testing.py
index ed28b2b6a873ae1d79b55bd148b391b915dcc71b..15156f0a3ee34d2b11bd96fe3be1b5c62503509b 100644
--- a/djblets/util/testing.py
+++ b/djblets/util/testing.py
@@ -25,8 +25,14 @@
 #
 
 from __future__ import unicode_literals
+import warnings
 
 from djblets.testing.testcases import (StubNodeList, StubParser,
                                        TagTest, TestCase)
 
+
+warnings.warn('djblets.util.testing is deprecated. Use '
+              'djblets.testing.testcases instead.', DeprecationWarning)
+
+
 __all__ = ['StubNodeList', 'StubParser', 'TagTest', 'TestCase']
diff --git a/djblets/util/tests.py b/djblets/util/tests.py
index ae253f1c272ac3f98d33485cd89ef70896f6294a..0679527d7b20637c27dc984e1669c919697aec6a 100644
--- a/djblets/util/tests.py
+++ b/djblets/util/tests.py
@@ -39,14 +39,16 @@ from django.template import Token, TOKEN_TEXT, TemplateSyntaxError
 from django.utils import six
 from django.utils.html import strip_spaces_between_tags
 
+from djblets.cache.backend import (cache_memoize, make_cache_key,
+                                   CACHE_CHUNK_SIZE)
+from djblets.db.fields import JSONField
+from djblets.testing.testcases import TestCase, TagTest
+from djblets.urls.resolvers import DynamicURLResolver
 from djblets.util.http import (get_http_accept_lists,
                                get_http_requested_mimetype,
                                is_mimetype_a)
-from djblets.util.misc import cache_memoize, make_cache_key, CACHE_CHUNK_SIZE
-from djblets.util.testing import TestCase, TagTest
 from djblets.util.templatetags import (djblets_deco, djblets_email,
                                        djblets_utils)
-from djblets.util.urlresolvers import DynamicURLResolver
 
 
 def normalize_html(s):
@@ -364,7 +366,6 @@ class JSONFieldTests(unittest.TestCase):
     """Unit tests for JSONField."""
 
     def setUp(self):
-        from djblets.util.fields import JSONField
         self.field = JSONField()
 
     def test_dumps_with_json_dict(self):
@@ -379,12 +380,6 @@ class JSONFieldTests(unittest.TestCase):
         self.assertTrue(isinstance(result, six.string_types))
         self.assertEqual(result, '{"a": 1, "b": 2}')
 
-    def test_dumps_with_json_dict(self):
-        """Testing JSONField with dumping a JSON dictionary"""
-        result = self.field.dumps({'a': 1})
-        self.assertTrue(isinstance(result, six.string_types))
-        self.assertEqual(result, '{"a": 1}')
-
     def test_loading_json_dict(self):
         """Testing JSONField with loading a JSON dictionary"""
         result = self.field.loads('{"a": 1, "b": 2}')
diff --git a/djblets/util/urlresolvers.py b/djblets/util/urlresolvers.py
index 0d73f58dbf56a50adf852fabccb6afa18dd4437b..5c5b442276e4f83f17768031bb7cd587580d019c 100644
--- a/djblets/util/urlresolvers.py
+++ b/djblets/util/urlresolvers.py
@@ -1,118 +1,11 @@
 from __future__ import unicode_literals
+import warnings
 
-from django.core.urlresolvers import (RegexURLResolver, clear_url_caches,
-                                      get_resolver)
+from djblets.urls.resolvers import DynamicURLResolver
 
 
-class DynamicURLResolver(RegexURLResolver):
-    """A URL resolver that allows for dynamically altering URL patterns.
+warnings.warn('djblets.util.urlresolvers is deprecated. Use '
+              'djblets.urls.resolvers instead.', DeprecationWarning)
 
-    A standard RegexURLResolver expects that a list of URL patterns will
-    be set once and never again change. In most applications, this is a
-    good assumption. However, some that are more specialized may need
-    to be able to swap in URL patterns dynamically. For example, those
-    that can plug in third-party extensions.
 
-    DynamicURLResolver makes it easy to add and remove URL patterns. Any
-    time the list of URL patterns changes, they'll be immediately available
-    for all URL resolution and reversing.
-
-    The usage is very simple::
-
-        dynamic_patterns = DynamicURLResolver()
-        urlpatterns = patterns('', dynamic_patterns)
-
-        dynamic_patterns.add_patterns([
-            url(...),
-            url(...),
-        ])
-
-    DynamicURLResolver will handle managing all the lookup caches to ensure
-    that there won't be any stale entries affecting any dynamic URL patterns.
-    """
-    def __init__(self, regex=r'', app_name=None, namespace=None):
-        super(DynamicURLResolver, self).__init__(regex=regex,
-                                                 urlconf_name=[],
-                                                 app_name=app_name,
-                                                 namespace=namespace)
-        self._resolver_chain = None
-
-    @property
-    def url_patterns(self):
-        """Returns the current list of URL patterns.
-
-        This is a simplified version of RegexURLResolver.url_patterns that
-        simply returns the preset list of patterns. Unlike the original
-        function, we don't care if the list is empty.
-        """
-        # Internally, urlconf_module represents whatever we're accessing
-        # for the list of URLs. It can be a list, or it can be something
-        # with a 'urlpatterns' property (intended for a urls.py). However,
-        # we force this to be a list in the constructor (as urlconf_name,
-        # which gets stored as urlconf_module), so we know we can just
-        # return it as-is.
-        return self.urlconf_module
-
-    def add_patterns(self, patterns):
-        """Adds a list of URL patterns.
-
-        The patterns will be made immediately available for use for any
-        lookups or reversing.
-        """
-        self.url_patterns.extend(patterns)
-        self._clear_cache()
-
-    def remove_patterns(self, patterns):
-        """Removes a list of URL patterns.
-
-        These patterns will no longer be able to be looked up or reversed.
-        """
-        for pattern in patterns:
-            self.url_patterns.remove(pattern)
-
-        self._clear_cache()
-
-    def _clear_cache(self):
-        """Clears the internal resolver caches.
-
-        This will clear all caches for this resolver and every parent
-        of this resolver, in order to ensure that the next lookup or reverse
-        will result in a lookup in this resolver. By default, every
-        RegexURLResolver in Django will cache all results from its children.
-
-        We take special care to only clear the caches of the resolvers in
-        our parent chain.
-        """
-        for resolver in self.resolver_chain:
-            resolver._reverse_dict.clear()
-            resolver._namespace_dict.clear()
-            resolver._app_dict.clear()
-
-        clear_url_caches()
-
-    @property
-    def resolver_chain(self):
-        """Returns every RegexURLResolver between here and the root.
-
-        The list of resolvers is cached in order to prevent having to locate
-        the resolvers more than once.
-        """
-        if self._resolver_chain is None:
-            self._resolver_chain = \
-                self._find_resolver_chain(get_resolver(None))
-
-        return self._resolver_chain
-
-    def _find_resolver_chain(self, resolver):
-        if resolver == self:
-            return [resolver]
-
-        for url_pattern in resolver.url_patterns:
-            if isinstance(url_pattern, RegexURLResolver):
-                resolvers = self._find_resolver_chain(url_pattern)
-
-                if resolvers:
-                    resolvers.append(resolver)
-                    return resolvers
-
-        return []
+__all__ = ['DynamicURLResolver']
diff --git a/djblets/util/views.py b/djblets/util/views.py
index 8de8027116c363f97b756badb655921854caf043..921a6822f9749c32da659dd6d8fe3fb8b905c379 100644
--- a/djblets/util/views.py
+++ b/djblets/util/views.py
@@ -3,7 +3,8 @@ from __future__ import unicode_literals
 from django.utils.translation import get_language
 from django.views.i18n import javascript_catalog
 
-from djblets.util.misc import cache_memoize, generate_locale_serial
+from djblets.cache.backend import cache_memoize
+from djblets.cache.serials import generate_locale_serial
 
 
 locale_serials = {}
diff --git a/djblets/webapi/core.py b/djblets/webapi/core.py
index a9a21f8e8dfe6332d6236a19559f6f93c446761f..17384e1d0fb8c7a0bfe05bfca243229bb6ceccfe 100644
--- a/djblets/webapi/core.py
+++ b/djblets/webapi/core.py
@@ -231,7 +231,7 @@ class WebAPIResponse(HttpResponse):
             # to save the file. It's not great, but it's what we must do.
             mimetype = 'text/plain'
 
-        super(WebAPIResponse, self).__init__(mimetype=mimetype,
+        super(WebAPIResponse, self).__init__(content_type=mimetype,
                                              status=status)
         self.request = request
         self.callback = request.GET.get('callback', None)
diff --git a/djblets/webapi/resources.py b/djblets/webapi/resources.py
index ca31dfba3bc7657d9625a4e37da53a59dd96767b..ce895982918a2125f0f30ee9da0bd53ab4bb29b1 100644
--- a/djblets/webapi/resources.py
+++ b/djblets/webapi/resources.py
@@ -20,7 +20,7 @@ from djblets.util.decorators import augment_method_from
 from djblets.util.http import (get_modified_since, etag_if_none_match,
                                set_last_modified, set_etag,
                                get_http_requested_mimetype)
-from djblets.util.misc import never_cache_patterns
+from djblets.urls.patterns import never_cache_patterns
 from djblets.webapi.auth import check_login
 from djblets.webapi.core import (WebAPIResponse,
                                  WebAPIResponseError,
diff --git a/djblets/webapi/tests.py b/djblets/webapi/tests.py
index 86f61d4970084cb4298827e3cdc9e50c19347888..63bd7af1f4b9da9fddee9a8f0b8c28c07934fb51 100644
--- a/djblets/webapi/tests.py
+++ b/djblets/webapi/tests.py
@@ -27,7 +27,7 @@ from __future__ import print_function, unicode_literals
 from django.contrib.auth.models import AnonymousUser, User
 from django.test.client import RequestFactory
 
-from djblets.util.testing import TestCase
+from djblets.testing.testcases import TestCase
 from djblets.webapi.decorators import (copy_webapi_decorator_data,
                                        webapi_login_required,
                                        webapi_permission_required,
