
Commit

checkin changes so far
rnag committed Dec 17, 2024
1 parent dba8877 commit aaf2077
Showing 10 changed files with 159 additions and 92 deletions.
5 changes: 0 additions & 5 deletions dataclass_wizard/abstractions.py
@@ -285,11 +285,6 @@ def default_load_to(tp: TypeInfo, extras: Extras) -> str:
def load_after_type_check(tp: TypeInfo, extras: Extras) -> str:
"""
Generate code to load an object after confirming its type.
:param type_str: The type annotation of the field as a string.
:param i: Index of the value being processed.
:param extras: Additional context or dependencies for code generation.
:raises ParseError: If the object type is not as expected.
"""

@staticmethod
45 changes: 31 additions & 14 deletions dataclass_wizard/bases.py
@@ -1,13 +1,12 @@
from abc import ABCMeta, abstractmethod
from collections.abc import Sequence
from typing import Callable, Type, Dict, Optional, ClassVar, Union, TypeVar, Sequence, Literal
from typing import Callable, Type, Dict, Optional, ClassVar, Union, TypeVar

from .constants import TAG
from .decorators import cached_class_property
from .models import Condition
from .enums import DateTimeTo, LetterCase, LetterCasePriority
from .v1.enums import KeyAction, KeyCase
from .models import Condition
from .type_def import FrozenKeys, EnvFileType
from .v1.enums import KeyAction, KeyCase


# Create a generic variable that can be 'AbstractMeta', or any subclass.
@@ -47,10 +46,12 @@ def __or__(cls: META, other: META) -> META:
# defined on the abstract class. Use `other` instead, which
# *will* be a concrete subclass of `AbstractMeta`.
src = other
# noinspection PyTypeChecker
for k in src.fields_to_merge:
if k in other_dict:
base_dict[k] = other_dict[k]
else:
# noinspection PyTypeChecker
for k in src.fields_to_merge:
if k in src_dict:
base_dict[k] = src_dict[k]
@@ -71,6 +72,7 @@ def __or__(cls: META, other: META) -> META:
# In a reversed MRO, the inheritance tree looks like this:
# |___ object -> AbstractMeta -> BaseJSONWizardMeta -> ...
# So here, we want to choose the third-to-last class in the list.
# noinspection PyUnresolvedReferences
src = src.__mro__[-3]

# noinspection PyTypeChecker
@@ -89,6 +91,7 @@ def __and__(cls: META, other: META) -> META:
other_dict = other.__dict__

# Set meta attributes here.
# noinspection PyTypeChecker
for k in cls.all_fields:
if k in other_dict:
setattr(cls, k, other_dict[k])
@@ -115,20 +118,19 @@ class AbstractMeta(metaclass=ABCOrAndMeta):
# Class attribute which enables us to detect a `JSONWizard.Meta` subclass.
__is_inner_meta__ = False

# True to enable Debug mode for additional (more verbose) log output.
# Enable Debug mode for more verbose log output.
#
# The value can also be a `str` or `int` which specifies
# the minimum level for logs in this library to show up.
# This setting can be a `bool`, `int`, or `str`:
# - `True` enables debug mode with default verbosity.
# - A `str` or `int` specifies the minimum log level (e.g., 'DEBUG', 10).
#
# For example, a message is logged whenever an unknown JSON key is
# encountered when `from_dict` or `from_json` is called.
# Debug mode provides additional helpful log messages, including:
# - Logging unknown JSON keys encountered during `from_dict` or `from_json`.
# - Detailed error messages for invalid types during unmarshalling.
#
# This also results in more helpful messages during error handling, which
# can be useful when debugging the cause when values are an invalid type
# (i.e. they don't match the annotation for the field) when unmarshalling
# a JSON object to a dataclass instance.
# Note: Enabling Debug mode may have a minor performance impact.
#
# Note there is a minor performance impact when DEBUG mode is enabled.
# @deprecated and will be removed in V1 - Use `v1_debug` instead.
debug_enabled: ClassVar['bool | int | str'] = False

# When enabled, a specified Meta config for the main dataclass (i.e. the
@@ -226,6 +228,19 @@ class AbstractMeta(metaclass=ABCOrAndMeta):
# Defaults to False.
v1: ClassVar[bool] = False

# Enable Debug mode for more verbose log output.
#
# This setting can be a `bool`, `int`, or `str`:
# - `True` enables debug mode with default verbosity.
# - A `str` or `int` specifies the minimum log level (e.g., 'DEBUG', 10).
#
# Debug mode provides additional helpful log messages, including:
# - Logging unknown JSON keys encountered during `from_dict` or `from_json`.
# - Detailed error messages for invalid types during unmarshalling.
#
# Note: Enabling Debug mode may have a minor performance impact.
v1_debug: ClassVar['bool | int | str'] = False

# Specifies the letter case used to match JSON keys when mapping them
# to dataclass fields.
#
@@ -397,11 +412,13 @@ class AbstractEnvMeta(metaclass=ABCOrAndMeta):
# the :func:`dataclasses.field`) in the serialization process.
skip_defaults_if: ClassVar[Condition] = None

# noinspection PyMethodParameters
@cached_class_property
def all_fields(cls) -> FrozenKeys:
"""Return a list of all class attributes"""
return frozenset(AbstractEnvMeta.__annotations__)

# noinspection PyMethodParameters
@cached_class_property
def fields_to_merge(cls) -> FrozenKeys:
"""Return a list of class attributes, minus `__special_attrs__`"""
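The new `v1_debug` setting documented above mirrors the deprecated `debug_enabled` flag for the opt-in v1 load logic. A minimal usage sketch (assuming a library version that includes this change and the usual inner `Meta` class pattern; the dataclass, field names, and sample payload are illustrative):

from dataclasses import dataclass
from dataclass_wizard import JSONWizard

@dataclass
class Example(JSONWizard):
    class _(JSONWizard.Meta):
        v1 = True           # opt in to the v1 load logic
        v1_debug = 'DEBUG'  # bool, int, or str (minimum log level)

    my_str: str
    my_int: int

# With debug enabled, unknown JSON keys and invalid-type errors hit during
# `from_dict` / `from_json` are logged with extra detail.
Example.from_dict({'my_str': 'hello', 'my_int': 1, 'other_key': 42})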
5 changes: 4 additions & 1 deletion dataclass_wizard/bases_meta.py
@@ -128,7 +128,10 @@ def bind_to(cls, dataclass: type, create=True, is_default=True,
base_cls=base_loader, v1=cls.v1)
cls_dumper = get_dumper(dataclass, create=create)

if cls.debug_enabled:
if cls.v1_debug:
_enable_debug_mode_if_needed(cls_loader, cls.v1_debug)

elif cls.debug_enabled:
_enable_debug_mode_if_needed(cls_loader, cls.debug_enabled)

if cls.json_key_to_field is not None:
44 changes: 37 additions & 7 deletions dataclass_wizard/class_helper.py
@@ -329,14 +329,44 @@ def _process_field(name: str,
dump_dataclass_field_to_alias):
"""Process a :class:`Field` for a dataclass field."""

if f.path is not None:
if set_paths:
dataclass_field_to_path[name] = f.path
if not f.dump_alias is ExplicitNull:
dump_dataclass_field_to_alias[f.name] = ExplicitNull

else:
if f.load_alias is not None:
load_dataclass_field_to_alias[name] = f.load_alias
if f.dump_alias is not None:
dump_dataclass_field_to_alias[name] = f.dump_alias
# if not f.json.dump:
# field_to_alias[f.name] = ExplicitNull
# elif f.json.all:
# keys = f.json.keys
# if f.json.path:
# if set_paths:
# field_to_path[f.name] = keys
# field_to_alias[f.name] = ''
# else:
# field_to_alias[f.name] = keys[0]


if f.path is not None:
if set_paths:
dataclass_field_to_path[name] = f.path
# TODO: I forgot why this is needed >.>
dump_dataclass_field_to_alias[name] = ''

else:
if f.load_alias is not None:
load_dataclass_field_to_alias[name] = f.load_alias
if f.dump_alias is not None:
dump_dataclass_field_to_alias[name] = f.dump_alias

# if not f.json.dump:
# field_to_alias[f.name] = ExplicitNull
# elif f.json.all:
# keys = f.json.keys
# if f.json.path:
# if set_paths:
# field_to_path[f.name] = keys
# field_to_alias[f.name] = ''
# else:
# field_to_alias[f.name] = keys[0]


def _setup_v1_load_config_for_cls(
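A self-contained sketch of how the rewritten `_process_field` branches route a field's metadata into the three lookup tables (the `FakeField` class and dict names below are hypothetical stand-ins, not the library's real field model; only the `path` / `load_alias` / `dump_alias` handling mirrors the diff above):

from dataclasses import dataclass
from typing import Any, Optional, Tuple

@dataclass
class FakeField:                       # hypothetical stand-in for the v1 field model
    path: Optional[Tuple[Any, ...]] = None
    load_alias: Optional[str] = None
    dump_alias: Optional[str] = None

field_to_path = {}
load_aliases = {}
dump_aliases = {}

def process(name, f, set_paths=True):
    if f.path is not None:
        # A nested key path wins; the empty dump alias mirrors the
        # TODO'd assignment in the diff above.
        if set_paths:
            field_to_path[name] = f.path
        dump_aliases[name] = ''
    else:
        if f.load_alias is not None:
            load_aliases[name] = f.load_alias
        if f.dump_alias is not None:
            dump_aliases[name] = f.dump_alias

process('inner', FakeField(path=('outer', 0, 'inner')))
process('name', FakeField(load_alias='Name', dump_alias='name_out'))
# field_to_path -> {'inner': ('outer', 0, 'inner')}
# load_aliases  -> {'name': 'Name'}
# dump_aliases  -> {'inner': '', 'name': 'name_out'}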
4 changes: 4 additions & 0 deletions dataclass_wizard/dumpers.py
@@ -27,6 +27,7 @@
dataclass_to_dumper, set_class_dumper,
CLASS_TO_DUMP_FUNC, setup_dump_config_for_cls_if_needed, get_meta,
dataclass_field_to_load_parser, dataclass_field_to_json_path, is_builtin, dataclass_field_to_skip_if,
v1_dataclass_field_to_alias,
)
from .constants import _DUMP_HOOKS, TAG, CATCH_ALL
from .decorators import _alias
@@ -288,6 +289,9 @@ def dump_func_for_dataclass(cls: Type[T],
# sub-classes from `DumpMixIn`, these hooks could be customized.
hooks = cls_dumper.__DUMP_HOOKS__

# TODO this is temporary
if meta.v1:
_ = v1_dataclass_field_to_alias(cls)
# Set up the initial dump config for the dataclass.
setup_dump_config_for_cls_if_needed(cls)

10 changes: 5 additions & 5 deletions dataclass_wizard/utils/object_path.py
@@ -3,7 +3,7 @@
from ..errors import ParseError


def safe_get(data, path, default=MISSING):
def safe_get(data, path, default=MISSING, raise_=True):
current_data = data
p = path # to avoid "unbound local variable" warnings

@@ -20,20 +20,20 @@ def safe_get(data, path, default=MISSING):
# AttributeError -
# raised when `data` is an invalid type, such as a `None`
except (IndexError, KeyError, AttributeError) as e:
if default is MISSING:
if raise_ and default is MISSING:
raise _format_err(e, current_data, path, p) from None
return default

# TypeError -
# raised when `data` is a `list`, but we try to use it like a `dict`
except TypeError:
e = TypeError('Invalid path')
raise _format_err(e, current_data, path, p) from None
raise _format_err(e, current_data, path, p, True) from None


def _format_err(e, current_data, path, current_path):
def _format_err(e, current_data, path, current_path, invalid_path=False):
return ParseError(
e, current_data, None,
e, current_data, dict if invalid_path else None,
path=' => '.join(repr(p) for p in path),
current_path=repr(current_path),
)
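An illustrative usage sketch for the updated `safe_get` signature (assuming, as the error handling above suggests, that each path element is applied as a successive key/index lookup, and that `MISSING` is the same `dataclasses.MISSING` sentinel the module uses; the sample data is made up):

from dataclasses import MISSING
from dataclass_wizard.utils.object_path import safe_get

data = {'info': [{'name': 'Alice'}]}

safe_get(data, ('info', 0, 'name'))                  # -> 'Alice'

# A non-MISSING default suppresses the ParseError on a missing element.
safe_get(data, ('info', 5, 'name'), default=None)    # -> None

# With the new raise_=False, even a MISSING default no longer raises;
# the caller (e.g. the generated v1 load code) can test for MISSING itself.
safe_get(data, ('info', 5, 'name'), MISSING, False)  # -> MISSING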
53 changes: 20 additions & 33 deletions dataclass_wizard/v1/loaders.py
@@ -199,7 +199,11 @@ def load_to_tuple(cls, tp: TypeInfo, extras: Extras):

# Check if the `Tuple` appears in the variadic form
# i.e. Tuple[str, ...]
is_variadic = args and args[-1] is ...
if args:
is_variadic = args[-1] is ...
else:
args = (Any, ...)
is_variadic = True

if is_variadic:
# Parser that handles the variadic form of :class:`Tuple`'s,
@@ -915,8 +919,7 @@ def load_func_for_dataclass(
# raise RecursiveClassError(cls) from None

field_to_path = dataclass_field_to_json_path(cls)
num_paths = len(field_to_path)
has_json_paths = True if num_paths else False
has_alias_paths = True if field_to_path else False

# Fix for using `auto_assign_tags` and `raise_on_unknown_json_key` together
# See https://github.com/rnag/dataclass-wizard/issues/137
@@ -962,11 +965,8 @@ def load_func_for_dataclass(
else:
should_raise = should_warn = None

if has_json_paths:
# loop_over_o = num_paths != len(cls_init_fields)
if has_alias_paths:
_locals['safe_get'] = safe_get
# else:
# loop_over_o = True

# Initialize the FuncBuilder
fn_gen = FunctionBuilder()
@@ -996,26 +996,6 @@ def load_func_for_dataclass(
if pre_assign:
fn_gen.add_line('i = 0')

if has_json_paths:

with fn_gen.try_():
for field, path in field_to_path.items():
if field in field_to_default:
default_value = f'_default_{field}'
_locals[default_value] = field_to_default[field]
extra_args = f', {default_value}'
else:
extra_args = ''
fn_gen.add_line(f'field={field!r}; init_kwargs[field] = field_to_parser[field](safe_get(o, {path!r}{extra_args}))')


# TODO raise some useful message like (ex. on IndexError):
# Field "my_str" of type tuple[float, str] in A2 has invalid value ['123']

with fn_gen.except_(Exception, 'e', ParseError):
fn_gen.add_line('re_raise(e, cls, o, fields, field, v1)')


vars_for_fields = []

if cls_init_fields:
@@ -1035,6 +1015,18 @@ def load_func_for_dataclass(
and (key := field_to_alias.get(name)) is not None
and name != key):
f_assign = f'field={name!r}; key={key!r}; {val}=o.get(key, MISSING)'

elif (has_alias_paths
and (path := field_to_path.get(name)) is not None):

if name in field_to_default:
f_assign = f'field={name!r}; {val}=safe_get(o, {path!r}, MISSING, False)'
else:
f_assign = f'field={name!r}; {val}=safe_get(o, {path!r})'

# TODO raise some useful message like (ex. on IndexError):
# Field "my_str" of type tuple[float, str] in A2 has invalid value ['123']

elif key_case is None:
field_to_alias[name] = name
f_assign = f'field={name!r}; {val}=o.get(field, MISSING)'
@@ -1047,11 +1039,6 @@ def load_func_for_dataclass(
string = generate_field_code(cls_loader, new_extras, f, i)

if name in field_to_default:
# default = default_val = field_to_default[name]
# FIXME might need to update default value logic
# if not is_builtin(default):
# default = f'_dflt{i}'
# _locals[default] = default_val
fn_gen.add_line(f_assign)

with fn_gen.if_(f'{val} is not MISSING'):
@@ -1061,8 +1048,8 @@ def load_func_for_dataclass(
# TODO confirm this is ok
# vars_for_fields.append(f'{name}={var}')
vars_for_fields.append(var)

fn_gen.add_line(f_assign)

with fn_gen.if_(f'{val} is not MISSING'):
fn_gen.add_line(f'{pre_assign}{var} = {string}')

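The `load_to_tuple` change at the top of this file makes a bare `Tuple` / `tuple` annotation behave like the variadic form `Tuple[Any, ...]`. A standalone sketch of just that argument-normalization step (the function name is illustrative, not the library's internal API):

from typing import Any, Tuple, get_args

def normalize_tuple_args(tp):
    # Mirrors the new branch in `load_to_tuple`: no subscripted args
    # means the annotation is treated as the variadic Tuple[Any, ...].
    args = get_args(tp)
    if args:
        is_variadic = args[-1] is ...
    else:
        args = (Any, ...)
        is_variadic = True
    return args, is_variadic

normalize_tuple_args(Tuple[str, ...])  # -> ((str, Ellipsis), True)
normalize_tuple_args(Tuple[int, str])  # -> ((int, str), False)
normalize_tuple_args(tuple)            # -> ((Any, Ellipsis), True)

The other notable change in this file: fields mapped to a nested JSON path are no longer resolved in a separate up-front loop; the generated load code now calls `safe_get` inline per field, passing `MISSING` and `raise_=False` when the field has a default so the usual MISSING check can fall back to it.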

0 comments on commit aaf2077
