diff --git a/.coveragerc b/.coveragerc
index 72b67fd2..53d94a13 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -3,6 +3,7 @@ source = constance
 branch = 1
 omit =
     */pytest.py
+    */tests/*
 
 [report]
 omit = *tests*,*migrations*,.tox/*,setup.py,*settings.py
diff --git a/AUTHORS b/AUTHORS
index 738055a6..2431dcdd 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -1,14 +1,18 @@
 Ales Zoulek
-Alexander frenzel
+Alexander Frenzel
+Alexandr Artemyev
 Bouke Haarsma
 Camilo Nova
 Charlie Hornsby
 Curtis Maloney
 Dan Poirier
 David Burke
+Dmitriy Tatarkin
+Elisey Zanko
 Florian Apolloner
 Igor Támara
 Ilya Chichak
+Ivan Klass
 Jake Merdich
 Jannis Leidel
 Janusz Harkot
@@ -32,6 +36,7 @@ Pierre-Olivier Marec
 Roman Krejcik
 Silvan Spross
 Sławek Ehlert
+Vladas Tamoshaitis
 Vojtech Jasny
 Yin Jifeng
 illumin-us-r3v0lution
@@ -40,7 +45,3 @@ saw2th
 trbs
 vl <1844144@gmail.com>
 vl
-Vladas Tamoshaitis
-Dmitriy Tatarkin
-Alexandr Artemyev
-Elisey Zanko
diff --git a/constance/backends/database.py b/constance/backends/database.py
index 912c41e8..be3e028c 100644
--- a/constance/backends/database.py
+++ b/constance/backends/database.py
@@ -11,6 +11,8 @@
 from constance import settings
 from constance import signals
 from constance.backends import Backend
+from constance.codecs import dumps
+from constance.codecs import loads
 
 
 class DatabaseBackend(Backend):
@@ -64,7 +66,7 @@ def mget(self, keys):
         try:
             stored = self._model._default_manager.filter(key__in=keys)
             for const in stored:
-                yield keys[const.key], const.value
+                yield keys[const.key], loads(const.value)
         except (OperationalError, ProgrammingError):
             pass
 
@@ -79,7 +81,7 @@ def get(self, key):
         if value is None:
             match = self._model._default_manager.filter(key=key).first()
             if match:
-                value = match.value
+                value = loads(match.value)
                 if self._cache:
                     self._cache.add(key, value)
         return value
@@ -100,16 +102,16 @@ def set(self, key, value):
         except self._model.DoesNotExist:
             try:
                 with transaction.atomic(using=queryset.db):
-                    queryset.create(key=key, value=value)
+                    queryset.create(key=key, value=dumps(value))
                 created = True
             except IntegrityError:
                 # Allow concurrent writes
                 constance = queryset.get(key=key)
 
         if not created:
-            old_value = constance.value
-            constance.value = value
-            constance.save()
+            old_value = loads(constance.value)
+            constance.value = dumps(value)
+            constance.save(update_fields=['value'])
         else:
             old_value = None
 
diff --git a/constance/backends/redisd.py b/constance/backends/redisd.py
index 506c6863..cb5bef7e 100644
--- a/constance/backends/redisd.py
+++ b/constance/backends/redisd.py
@@ -1,5 +1,3 @@
-from pickle import dumps
-from pickle import loads
 from threading import RLock
 from time import monotonic
 
@@ -9,8 +7,9 @@
 from constance import settings
 from constance import signals
 from constance import utils
-
-from . import Backend
+from constance.backends import Backend
+from constance.codecs import dumps
+from constance.codecs import loads
 
 
 class RedisBackend(Backend):
@@ -36,7 +35,7 @@ def add_prefix(self, key):
     def get(self, key):
         value = self._rd.get(self.add_prefix(key))
         if value:
-            return loads(value)  # noqa: S301
+            return loads(value)
         return None
 
     def mget(self, keys):
@@ -45,11 +44,11 @@ def mget(self, keys):
         if not keys:
             return
         prefixed_keys = [self.add_prefix(key) for key in keys]
         for key, value in zip(keys, self._rd.mget(prefixed_keys)):
             if value:
-                yield key, loads(value)  # noqa: S301
+                yield key, loads(value)
 
     def set(self, key, value):
         old_value = self.get(key)
-        self._rd.set(self.add_prefix(key), dumps(value, protocol=settings.REDIS_PICKLE_VERSION))
+        self._rd.set(self.add_prefix(key), dumps(value))
         signals.config_updated.send(sender=config, key=key, old_value=old_value, new_value=value)
diff --git a/constance/codecs.py b/constance/codecs.py
new file mode 100644
index 00000000..5996e344
--- /dev/null
+++ b/constance/codecs.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import json
+import logging
+import uuid
+from datetime import date
+from datetime import datetime
+from datetime import time
+from datetime import timedelta
+from decimal import Decimal
+from typing import Any
+from typing import Protocol
+from typing import TypeVar
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_DISCRIMINATOR = 'default'
+
+
+class JSONEncoder(json.JSONEncoder):
+    """Django-constance custom json encoder."""
+
+    def default(self, o):
+        for discriminator, (t, _, encoder) in _codecs.items():
+            if isinstance(o, t):
+                return _as(discriminator, encoder(o))
+        raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable')
+
+
+def _as(discriminator: str, v: Any) -> dict[str, Any]:
+    return {'__type__': discriminator, '__value__': v}
+
+
+def dumps(obj, _dumps=json.dumps, cls=JSONEncoder, default_kwargs=None, **kwargs):
+    """Serialize object to json string."""
+    default_kwargs = default_kwargs or {}
+    is_default_type = isinstance(obj, (str, int, bool, float, type(None)))
+    return _dumps(
+        _as(DEFAULT_DISCRIMINATOR, obj) if is_default_type else obj, cls=cls, **dict(default_kwargs, **kwargs)
+    )
+
+
+def loads(s, _loads=json.loads, **kwargs):
+    """Deserialize json string to object."""
+    return _loads(s, object_hook=object_hook, **kwargs)
+
+
+def object_hook(o: dict) -> Any:
+    """Hook function to perform custom deserialization."""
+    if o.keys() == {'__type__', '__value__'}:
+        if o['__type__'] == DEFAULT_DISCRIMINATOR:
+            return o['__value__']
+        codec = _codecs.get(o['__type__'])
+        if not codec:
+            raise ValueError(f'Unsupported type: {o["__type__"]}')
+        return codec[1](o['__value__'])
+    logger.error('Cannot deserialize object: %s', o)
+    raise ValueError(f'Invalid object: {o}')
+
+
+T = TypeVar('T')
+
+
+class Encoder(Protocol[T]):
+    def __call__(self, value: T, /) -> str: ...  # pragma: no cover
+
+
+class Decoder(Protocol[T]):
+    def __call__(self, value: str, /) -> T: ...  # pragma: no cover
+
+
+def register_type(t: type[T], discriminator: str, encoder: Encoder[T], decoder: Decoder[T]):
+    if not discriminator:
+        raise ValueError('Discriminator must be specified')
+    if _codecs.get(discriminator) or discriminator == DEFAULT_DISCRIMINATOR:
+        raise ValueError(f'Type with discriminator {discriminator} is already registered')
+    _codecs[discriminator] = (t, decoder, encoder)
+
+
+_codecs: dict[str, tuple[type, Decoder, Encoder]] = {}
+
+
+def _register_default_types():
+    # NOTE: datetime should be registered before date, because datetime is also instance of date.
+    register_type(datetime, 'datetime', datetime.isoformat, datetime.fromisoformat)
+    register_type(date, 'date', lambda o: o.isoformat(), lambda o: datetime.fromisoformat(o).date())
+    register_type(time, 'time', lambda o: o.isoformat(), time.fromisoformat)
+    register_type(Decimal, 'decimal', str, Decimal)
+    register_type(uuid.UUID, 'uuid', lambda o: o.hex, uuid.UUID)
+    register_type(timedelta, 'timedelta', lambda o: o.total_seconds(), lambda o: timedelta(seconds=o))
+
+
+_register_default_types()
diff --git a/constance/migrations/0001_initial.py b/constance/migrations/0001_initial.py
index 16efe611..f1e187a2 100644
--- a/constance/migrations/0001_initial.py
+++ b/constance/migrations/0001_initial.py
@@ -1,4 +1,3 @@
-import picklefield.fields
 from django.db import migrations
 from django.db import models
 
@@ -14,7 +13,7 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('key', models.CharField(max_length=255, unique=True)),
-                ('value', picklefield.fields.PickledObjectField(blank=True, editable=False, null=True)),
+                ('value', models.TextField(blank=True, editable=False, null=True)),
             ],
             options={
                 'verbose_name': 'constance',
diff --git a/constance/migrations/0003_drop_pickle.py b/constance/migrations/0003_drop_pickle.py
new file mode 100644
index 00000000..726c5aa0
--- /dev/null
+++ b/constance/migrations/0003_drop_pickle.py
@@ -0,0 +1,53 @@
+import logging
+import pickle
+from base64 import b64decode
+from importlib import import_module
+
+from django.db import migrations
+
+from constance import settings
+from constance.codecs import dumps
+
+logger = logging.getLogger(__name__)
+
+
+def import_module_attr(path):
+    package, module = path.rsplit('.', 1)
+    return getattr(import_module(package), module)
+
+
+def migrate_pickled_data(apps, schema_editor) -> None:  # pragma: no cover
+    Constance = apps.get_model('constance', 'Constance')
+
+    for constance in Constance.objects.exclude(value=None):
+        constance.value = dumps(pickle.loads(b64decode(constance.value.encode())))  # noqa: S301
+        constance.save(update_fields=['value'])
+
+    if settings.BACKEND in ('constance.backends.redisd.RedisBackend', 'constance.backends.redisd.CachingRedisBackend'):
+        import redis
+
+        _prefix = settings.REDIS_PREFIX
+        connection_cls = settings.REDIS_CONNECTION_CLASS
+        if connection_cls is not None:
+            _rd = import_module_attr(connection_cls)()
+        else:
+            if isinstance(settings.REDIS_CONNECTION, str):
+                _rd = redis.from_url(settings.REDIS_CONNECTION)
+            else:
+                _rd = redis.Redis(**settings.REDIS_CONNECTION)
+        redis_migrated_data = {}
+        for key in settings.CONFIG:
+            prefixed_key = f'{_prefix}{key}'
+            value = _rd.get(prefixed_key)
+            if value is not None:
+                redis_migrated_data[prefixed_key] = dumps(pickle.loads(value))  # noqa: S301
+        for prefixed_key, value in redis_migrated_data.items():
+            _rd.set(prefixed_key, value)
+
+
+class Migration(migrations.Migration):
+    dependencies = [('constance', '0002_migrate_from_old_table')]
+
+    operations = [
+        migrations.RunPython(migrate_pickled_data),
+    ]
diff --git a/constance/models.py b/constance/models.py
index 6427ed33..5d6eeece 100644
--- a/constance/models.py
+++ b/constance/models.py
@@ -1,20 +1,10 @@
-from django.core.exceptions import ImproperlyConfigured
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 
-try:
-    from picklefield import PickledObjectField
-except ImportError:
-    raise ImproperlyConfigured(
-        "Couldn't find the the 3rd party app "
-        'django-picklefield which is required for '
-        'the constance database backend.'
-    ) from None
-
 
 class Constance(models.Model):
     key = models.CharField(max_length=255, unique=True)
-    value = PickledObjectField(null=True, blank=True)
+    value = models.TextField(null=True, blank=True)
 
     class Meta:
         verbose_name = _('constance')
diff --git a/constance/settings.py b/constance/settings.py
index 004d7d04..fd8045cf 100644
--- a/constance/settings.py
+++ b/constance/settings.py
@@ -1,5 +1,3 @@
-import pickle
-
 from django.conf import settings
 
 BACKEND = getattr(settings, 'CONSTANCE_BACKEND', 'constance.backends.redisd.RedisBackend')
@@ -26,8 +24,6 @@
 
 REDIS_CONNECTION = getattr(settings, 'CONSTANCE_REDIS_CONNECTION', {})
 
-REDIS_PICKLE_VERSION = getattr(settings, 'CONSTANCE_REDIS_PICKLE_VERSION', pickle.DEFAULT_PROTOCOL)
-
 SUPERUSER_ONLY = getattr(settings, 'CONSTANCE_SUPERUSER_ONLY', True)
 
 IGNORE_ADMIN_VERSION_CHECK = getattr(settings, 'CONSTANCE_IGNORE_ADMIN_VERSION_CHECK', False)
diff --git a/docs/backends.rst b/docs/backends.rst
index fa7c51b7..27d5a806 100644
--- a/docs/backends.rst
+++ b/docs/backends.rst
@@ -74,16 +74,6 @@ database. Defaults to ``'constance:'``. E.g.::
 
     CONSTANCE_REDIS_PREFIX = 'constance:myproject:'
 
-``CONSTANCE_REDIS_PICKLE_VERSION``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The (optional) protocol version of pickle you want to use to serialize your python
-objects when storing in the Redis database. Defaults to ``pickle.DEFAULT_PROTOCOL``. E.g.::
-
-    CONSTANCE_REDIS_PICKLE_VERSION = pickle.DEFAULT_PROTOCOL
-
-You might want to pin this value to a specific protocol number, since ``pickle.DEFAULT_PROTOCOL``
-means different things between versions of Python.
 
 ``CONSTANCE_REDIS_CACHE_TIMEOUT``
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -94,9 +84,7 @@ Defaults to `60` seconds.
 Database
 --------
 
-Database backend stores configuration values in a
-standard Django model. It requires the package `django-picklefield`_ for
-storing those values.
+Database backend stores configuration values in a standard Django model.
 
 You must set the ``CONSTANCE_BACKEND`` Django setting to::
 
@@ -161,8 +149,6 @@ configured cache backend to enable this feature, e.g. "default"::
 simply set the :setting:`CONSTANCE_DATABASE_CACHE_AUTOFILL_TIMEOUT`
 setting to ``None``.
 
-.. _django-picklefield: https://pypi.org/project/django-picklefield/
-
 Memory
 ------
 
diff --git a/pyproject.toml b/pyproject.toml
index 0a1df271..cb9f625b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,9 +34,6 @@ classifiers = [
     "Programming Language :: Python :: Implementation :: CPython",
     "Topic :: Utilities",
 ]
-dependencies = [
-    "django-picklefield",
-]
 
 [project.optional-dependencies]
 redis = [
diff --git a/tests/test_codecs.py b/tests/test_codecs.py
new file mode 100644
index 00000000..27b741ba
--- /dev/null
+++ b/tests/test_codecs.py
@@ -0,0 +1,90 @@
+import uuid
+from datetime import date
+from datetime import datetime
+from datetime import time
+from datetime import timedelta
+from decimal import Decimal
+from unittest import TestCase
+
+from constance.codecs import dumps
+from constance.codecs import loads
+from constance.codecs import register_type
+
+
+class TestJSONSerialization(TestCase):
+    def setUp(self):
+        self.datetime = datetime(2023, 10, 5, 15, 30, 0)
+        self.date = date(2023, 10, 5)
+        self.time = time(15, 30, 0)
+        self.decimal = Decimal('10.5')
+        self.uuid = uuid.UUID('12345678123456781234567812345678')
+        self.string = 'test'
+        self.integer = 42
+        self.float = 3.14
+        self.boolean = True
+        self.none = None
+        self.timedelta = timedelta(days=1, hours=2, minutes=3)
+
+    def test_serializes_and_deserializes_default_types(self):
+        self.assertEqual(dumps(self.datetime), '{"__type__": "datetime", "__value__": "2023-10-05T15:30:00"}')
+        self.assertEqual(dumps(self.date), '{"__type__": "date", "__value__": "2023-10-05"}')
+        self.assertEqual(dumps(self.time), '{"__type__": "time", "__value__": "15:30:00"}')
+        self.assertEqual(dumps(self.decimal), '{"__type__": "decimal", "__value__": "10.5"}')
+        self.assertEqual(dumps(self.uuid), '{"__type__": "uuid", "__value__": "12345678123456781234567812345678"}')
+        self.assertEqual(dumps(self.string), '{"__type__": "default", "__value__": "test"}')
+        self.assertEqual(dumps(self.integer), '{"__type__": "default", "__value__": 42}')
+        self.assertEqual(dumps(self.float), '{"__type__": "default", "__value__": 3.14}')
+        self.assertEqual(dumps(self.boolean), '{"__type__": "default", "__value__": true}')
+        self.assertEqual(dumps(self.none), '{"__type__": "default", "__value__": null}')
+        self.assertEqual(dumps(self.timedelta), '{"__type__": "timedelta", "__value__": 93780.0}')
+        for t in (
+            self.datetime,
+            self.date,
+            self.time,
+            self.decimal,
+            self.uuid,
+            self.string,
+            self.integer,
+            self.float,
+            self.boolean,
+            self.none,
+            self.timedelta,
+        ):
+            self.assertEqual(t, loads(dumps(t)))
+
+    def test_invalid_deserialization(self):
+        with self.assertRaisesRegex(ValueError, 'Expecting value'):
+            loads('THIS_IS_NOT_RIGHT')
+        with self.assertRaisesRegex(ValueError, 'Invalid object'):
+            loads('{"__type__": "THIS_IS_NOT_RIGHT", "__value__": "test", "THIS_IS_NOT_RIGHT": "THIS_IS_NOT_RIGHT"}')
+        with self.assertRaisesRegex(ValueError, 'Unsupported type'):
+            loads('{"__type__": "THIS_IS_NOT_RIGHT", "__value__": "test"}')
+
+    def test_handles_unknown_type(self):
+        class UnknownType:
+            pass
+
+        with self.assertRaisesRegex(TypeError, 'Object of type UnknownType is not JSON serializable'):
+            dumps(UnknownType())
+
+    def test_custom_type_serialization(self):
+        class CustomType:
+            def __init__(self, value):
+                self.value = value
+
+        register_type(CustomType, 'custom', lambda o: o.value, lambda o: CustomType(o))
+        custom_data = CustomType('test')
+        json_data = dumps(custom_data)
+        self.assertEqual(json_data, '{"__type__": "custom", "__value__": "test"}')
+        deserialized_data = loads(json_data)
+        self.assertTrue(isinstance(deserialized_data, CustomType))
+        self.assertEqual(deserialized_data.value, 'test')
+
+    def test_register_known_type(self):
+        with self.assertRaisesRegex(ValueError, 'Discriminator must be specified'):
+            register_type(int, '', lambda o: o.value, lambda o: int(o))
+        with self.assertRaisesRegex(ValueError, 'Type with discriminator default is already registered'):
+            register_type(int, 'default', lambda o: o.value, lambda o: int(o))
+        register_type(int, 'new_custom_type', lambda o: o.value, lambda o: int(o))
+        with self.assertRaisesRegex(ValueError, 'Type with discriminator new_custom_type is already registered'):
+            register_type(int, 'new_custom_type', lambda o: o.value, lambda o: int(o))
diff --git a/tox.ini b/tox.ini
index c9ff6310..d549ddab 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,7 +10,6 @@ skip_missing_interpreters = True
 deps =
     redis
    coverage
-    django-picklefield
    dj42: django>=4.2,<4.3
    dj50: django>=5.0,<5.1
    dj51: django>=5.1,<5.2