diff --git a/CHANGES.rst b/CHANGES.rst index 69995a073..ebe858921 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -34,6 +34,7 @@ The ASDF Standard is at v1.6.0 - Add support for python 3.12 [#1641] - Move IntegerType to converter and drop cache of converted values. [#1527] +- Remove legacy extension API [#1637] 2.15.1 (2023-08-07) ------------------- diff --git a/asdf/_resolver.py b/asdf/_resolver.py deleted file mode 100644 index 09883b44d..000000000 --- a/asdf/_resolver.py +++ /dev/null @@ -1,171 +0,0 @@ -import sys -import warnings - -from . import constants -from .exceptions import AsdfDeprecationWarning - - -class Resolver: - """ - A class that can be used to map strings with a particular prefix - to another. - """ - - def __init__(self, mappings, prefix): - """ - Parameters - ---------- - mappings : list of tuple or callable - A list of mappings to try, in order. - For each entry: - - - If a callable, must take a string and return a remapped - string. Should return `None` if the mapping does not - apply to the input. - - - If a tuple, the first item is a string prefix to match. - The second item specifies how to create the new result - in Python string formatting syntax. The following - formatting tokens are available, where ``X`` relates to - the ``prefix`` argument: - - - ``{X}``: The entire string passed in. - - ``{X_prefix}``: The prefix of the string that was - matched. - - ``{X_suffix}``: The part of the string following the - prefix. - - prefix : str - The prefix to use for the Python formatting token names. - """ - self._mappings = self._validate_mappings(mappings) - self._prefix = prefix - - def add_mapping(self, mappings, prefix=""): - # Deprecating this because Resolver is used as part of a dictionary key - # and so shouldn't be mutable. - warnings.warn("The 'add_mapping' method is deprecated.", AsdfDeprecationWarning) - - if prefix != self._prefix: - msg = f"Prefix '{prefix}' does not match the Resolver prefix '{self._prefix}'" - raise ValueError(msg) - - self._mappings = self._mappings + self._validate_mappings(mappings) - - def _perform_mapping(self, mapping, input_): - if callable(mapping): - output = mapping(input_) - if output is not None: - return (sys.maxsize, mapping(input_)) - - return None - - if input_.startswith(mapping[0]): - format_tokens = { - self._prefix: input_, - self._prefix + "_prefix": mapping[0], - self._prefix + "_suffix": input_[len(mapping[0]) :], - } - - return len(mapping[0]), mapping[1].format(**format_tokens) - - return None - - def _validate_mappings(self, mappings): - normalized = [] - for mapping in mappings: - if callable(mapping): - normalized.append(mapping) - elif ( - isinstance(mapping, (list, tuple)) - and len(mapping) == 2 - and isinstance(mapping[0], str) - and isinstance(mapping[1], str) - ): - normalized.append(tuple(mapping)) - else: - msg = f"Invalid mapping '{mapping}'" - raise ValueError(msg) - - return tuple(normalized) - - def __call__(self, input_): - candidates = [(0, input_)] - for mapping in self._mappings: - output = self._perform_mapping(mapping, input_) - if output is not None: - candidates.append(output) - - candidates.sort() - return candidates[-1][1] - - def __hash__(self): - return hash(self._mappings) - - def __eq__(self, other): - if not isinstance(other, Resolver): - return NotImplemented - - return self._mappings == other._mappings - - -class ResolverChain: - """ - A chain of Resolvers, each of which is called with the previous Resolver's - output to produce the final transformed string. 
- """ - - def __init__(self, *resolvers): - """ - Parameters - ---------- - *resolvers : list of Resolver - Resolvers to include in the chain. - """ - self._resolvers = tuple(resolvers) - - def __call__(self, input_): - for resolver in self._resolvers: - input_ = resolver(input_) - return input_ - - def __hash__(self): - return hash(self._resolvers) - - def __eq__(self, other): - if not isinstance(other, ResolverChain): - return NotImplemented - - return self._resolvers == other._resolvers - - -DEFAULT_URL_MAPPING = [] - -DEFAULT_TAG_TO_URL_MAPPING = [(constants.STSCI_SCHEMA_TAG_BASE, "http://stsci.edu/schemas/asdf{tag_suffix}")] - - -def default_url_mapping(uri): - warnings.warn("'default_url_mapping' is deprecated.", AsdfDeprecationWarning) - return default_url_mapping._resolver(uri) - - -default_url_mapping._resolver = Resolver(DEFAULT_URL_MAPPING, "url") - - -def default_tag_to_url_mapping(uri): - warnings.warn("'default_tag_to_url_mapping' is deprecated.", AsdfDeprecationWarning) - return default_tag_to_url_mapping._resolver(uri) - - -default_tag_to_url_mapping._resolver = Resolver(DEFAULT_TAG_TO_URL_MAPPING, "tag") - - -def default_resolver(uri): - warnings.warn( - "The 'default_resolver(...)' function is deprecated. Use 'asdf.extension.get_default_resolver()(...)' instead.", - AsdfDeprecationWarning, - ) - return default_resolver._resolver(uri) - - -default_resolver._resolver = ResolverChain(default_tag_to_url_mapping._resolver, default_url_mapping._resolver) diff --git a/asdf/_tests/_helpers.py b/asdf/_tests/_helpers.py index a3fcd15a7..8306911cd 100644 --- a/asdf/_tests/_helpers.py +++ b/asdf/_tests/_helpers.py @@ -24,11 +24,9 @@ import asdf from asdf import generic_io, versioning -from asdf._resolver import Resolver, ResolverChain from asdf.asdf import AsdfFile, get_asdf_library_info from asdf.constants import YAML_TAG_PREFIX -from asdf.exceptions import AsdfConversionWarning, AsdfDeprecationWarning -from asdf.extension import _legacy +from asdf.exceptions import AsdfConversionWarning from asdf.tags.core import AsdfObject from asdf.versioning import ( AsdfVersion, @@ -97,20 +95,14 @@ def assert_tree_match(old_tree, new_tree, ctx=None, funcname="assert_equal", ign ignore_keys = ["asdf_library", "history"] ignore_keys = set(ignore_keys) - if ctx is None: - version_string = str(versioning.default_version) - ctx = _legacy.default_extensions.extension_list - else: - version_string = ctx.version_string - def recurse(old, new): if id(old) in seen or id(new) in seen: return seen.add(id(old)) seen.add(id(new)) - old_type = ctx._type_index.from_custom_type(type(old), version_string) - new_type = ctx._type_index.from_custom_type(type(new), version_string) + old_type = None + new_type = None if ( old_type is not None @@ -418,35 +410,6 @@ def assert_no_warnings(warning_class=None): ) -def assert_extension_correctness(extension): - """ - Assert that an ASDF extension's types are all correctly formed and - that the extension provides all of the required schemas. - - Parameters - ---------- - extension : asdf._AsdfExtension - The extension to validate - """ - __tracebackhide__ = True - - warnings.warn( - "assert_extension_correctness is deprecated and depends " - "on the deprecated type system. 
Please use the new " - "extension API: " - "https://asdf.readthedocs.io/en/stable/asdf/extending/converters.html", - AsdfDeprecationWarning, - ) - - resolver = ResolverChain( - Resolver(extension.tag_mapping, "tag"), - Resolver(extension.url_mapping, "url"), - ) - - for extension_type in extension.types: - _assert_extension_type_correctness(extension, extension_type, resolver) - - def _assert_extension_type_correctness(extension, extension_type, resolver): __tracebackhide__ = True diff --git a/asdf/_tests/commands/tests/test_extension.py b/asdf/_tests/commands/tests/test_extension.py index 928f01f4b..7b500f8af 100644 --- a/asdf/_tests/commands/tests/test_extension.py +++ b/asdf/_tests/commands/tests/test_extension.py @@ -1,6 +1,7 @@ import pytest from asdf.commands import find_extensions +from asdf.versioning import supported_versions @pytest.mark.parametrize("summary", [True, False]) @@ -10,7 +11,8 @@ def test_parameter_combinations(summary, tags_only): find_extensions(summary, tags_only) -def test_builtin_extension_included(capsys): +@pytest.mark.parametrize("standard_version", supported_versions) +def test_builtin_extension_included(capsys, standard_version): find_extensions(True, False) captured = capsys.readouterr() - assert "asdf.extension.BuiltinExtension" in captured.out + assert f"core-{standard_version}" in captured.out diff --git a/asdf/_tests/commands/tests/test_tags.py b/asdf/_tests/commands/tests/test_tags.py index 4afa252a4..88caf8153 100644 --- a/asdf/_tests/commands/tests/test_tags.py +++ b/asdf/_tests/commands/tests/test_tags.py @@ -19,7 +19,5 @@ def test_all_tags_present(): tags = {line.strip() for line in iostream.readlines()} af = AsdfFile() - for tag in af._type_index._type_by_tag: - assert tag in tags for tag in af.extension_manager._converters_by_tag: assert tag in tags diff --git a/asdf/_tests/data/custom_flow-1.0.0.yaml b/asdf/_tests/data/custom_flow-1.0.0.yaml deleted file mode 100644 index 49bf2aa7c..000000000 --- a/asdf/_tests/data/custom_flow-1.0.0.yaml +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/custom_flow-1.0.0" -type: object -properties: - a: - type: number - b: - type: number -flowStyle: block diff --git a/asdf/_tests/data/custom_flow-1.1.0.yaml b/asdf/_tests/data/custom_flow-1.1.0.yaml deleted file mode 100644 index c932dcb46..000000000 --- a/asdf/_tests/data/custom_flow-1.1.0.yaml +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/custom_flow-1.1.0" -type: object -properties: - c: - type: number - d: - type: number -flowStyle: block diff --git a/asdf/_tests/data/custom_style-1.0.0.yaml b/asdf/_tests/data/custom_style-1.0.0.yaml deleted file mode 100644 index 806f3d395..000000000 --- a/asdf/_tests/data/custom_style-1.0.0.yaml +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/custom_style-1.0.0" -type: string -style: literal diff --git a/asdf/_tests/data/default-1.0.0.yaml b/asdf/_tests/data/default-1.0.0.yaml deleted file mode 100644 index 51067a5a2..000000000 --- a/asdf/_tests/data/default-1.0.0.yaml +++ /dev/null @@ -1,55 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/default-1.0.0" -type: object -properties: - a: - type: integer - default: 42 - b: - type: object - properties: - c: - type: 
integer - default: 82 - d: - allOf: - - type: object - properties: - e: - type: integer - default: 122 - - type: object - properties: - f: - type: integer - default: 162 - g: - anyOf: - - type: object - properties: - h: - type: integer - default: 202 - - type: object - properties: - i: - type: integer - default: 242 - j: - oneOf: - - type: object - properties: - k: - type: integer - default: 282 - required: [k] - additionalProperties: false - - type: object - properties: - l: - type: integer - default: 322 - required: [l] - additionalProperties: false diff --git a/asdf/_tests/data/foreign_tag_reference-1.0.0.yaml b/asdf/_tests/data/foreign_tag_reference-1.0.0.yaml deleted file mode 100644 index e282b9886..000000000 --- a/asdf/_tests/data/foreign_tag_reference-1.0.0.yaml +++ /dev/null @@ -1,17 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/foreign_tag_reference-1.0.0" -title: An example custom type for testing tag references - -tag: "tag:nowhere.org:custom/foreign_tag_reference-1.0.0" -type: object -properties: - a: - # Test foreign tag reference using tag URI - $ref: "tag:nowhere.org:custom/tag_reference-1.0.0" - b: - # Test foreign tag reference using tag ID - $ref: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" -required: [a, b] -... diff --git a/asdf/_tests/data/one_of-1.0.0.yaml b/asdf/_tests/data/one_of-1.0.0.yaml deleted file mode 100644 index 5bc5e88d9..000000000 --- a/asdf/_tests/data/one_of-1.0.0.yaml +++ /dev/null @@ -1,21 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/one_of-1.0.0" -title: | - oneOf test schema -oneOf: - - type: object - properties: - value: - type: number - required: [value] - additionalProperties: false - - - type: object - properties: - value: - type: string - required: [value] - additionalProperties: false -... diff --git a/asdf/_tests/data/tag_reference-1.0.0.yaml b/asdf/_tests/data/tag_reference-1.0.0.yaml deleted file mode 100644 index 4ef5aee62..000000000 --- a/asdf/_tests/data/tag_reference-1.0.0.yaml +++ /dev/null @@ -1,15 +0,0 @@ -%YAML 1.1 ---- -$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" -id: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" -title: An example custom type for testing tag references - -tag: "tag:nowhere.org:custom/tag_reference-1.0.0" -type: object -properties: - name: - type: string - things: - $ref: "tag:stsci.edu:asdf/core/ndarray-1.0.0" -required: [name, things] -... diff --git a/asdf/_tests/objects.py b/asdf/_tests/objects.py deleted file mode 100644 index 95e08f07b..000000000 --- a/asdf/_tests/objects.py +++ /dev/null @@ -1,45 +0,0 @@ -import pytest - -from asdf import util -from asdf._types import CustomType -from asdf.exceptions import AsdfDeprecationWarning - -from ._helpers import get_test_data_path - -with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class CustomTestType(CustomType): - """This class is intended to be inherited by custom types that are used - purely for the purposes of testing. The methods ``from_tree_tagged`` and - ``from_tree`` are implemented solely in order to avoid custom type - conversion warnings. 
- """ - - @classmethod - def from_tree_tagged(cls, tree, ctx): - return cls.from_tree(tree.data, ctx) - - @classmethod - def from_tree(cls, tree, ctx): - return tree - - -class CustomExtension: - """ - This is the base class that is used for extensions for custom tag - classes that exist only for the purposes of testing. - """ - - @property - def types(self): - return [] - - @property - def tag_mapping(self): - return [("tag:nowhere.org:custom", "http://nowhere.org/schemas/custom{tag_suffix}")] - - @property - def url_mapping(self): - return [ - ("http://nowhere.org/schemas/custom/", util.filepath_to_url(get_test_data_path("")) + "/{url_suffix}.yaml"), - ] diff --git a/asdf/_tests/test_api.py b/asdf/_tests/test_api.py index 9cf562870..3d394ce6f 100644 --- a/asdf/_tests/test_api.py +++ b/asdf/_tests/test_api.py @@ -11,10 +11,8 @@ from numpy.testing import assert_array_equal import asdf -import asdf.extension._legacy as _legacy_extension -from asdf import _resolver as resolver from asdf import config_context, get_config, treeutil, versioning -from asdf.exceptions import AsdfDeprecationWarning, AsdfWarning, ValidationError +from asdf.exceptions import AsdfWarning, ValidationError from asdf.extension import ExtensionProxy from ._helpers import assert_no_warnings, assert_roundtrip_tree, assert_tree_match, yaml_to_asdf @@ -297,12 +295,6 @@ def test_open_pathlib_path(tmp_path): assert (af["data"] == tree["data"]).all() -class FooExtension: - types = [] - tag_mapping = [] - url_mapping = [] - - @pytest.mark.parametrize( ("installed", "extension", "warns"), [ @@ -314,6 +306,9 @@ class FooExtension: ], ) def test_extension_version_check(installed, extension, warns): + class FooExtension: + extension_uri = "asdf://somewhere.org/extensions/foo-1.0.0" + proxy = ExtensionProxy(FooExtension(), package_name="foo", package_version=installed) with config_context() as config: @@ -326,7 +321,7 @@ def test_extension_version_check(installed, extension, warns): "history": { "extensions": [ asdf.tags.core.ExtensionMetadata( - extension_class="asdf._tests.test_api.FooExtension", + extension_uri=FooExtension.extension_uri, software=asdf.tags.core.Software(name="foo", version=extension), ), ], @@ -344,6 +339,29 @@ def test_extension_version_check(installed, extension, warns): af._check_extensions(tree) +def test_extension_check_no_warning_on_builtin(): + """ + Prior to asdf 3.0 files were written using the asdf.extension.BuiltinExtension + (which used the legacy extension api). This extension was removed in + asdf 3.0. We don't want to warn that this extension is missing for every + file that is opened so make sure _check_extensions doesn't warn + that BuiltinExtension is missing. 
+ """ + af = asdf.AsdfFile() + tree = { + "history": { + "extensions": [ + asdf.tags.core.ExtensionMetadata( + extension_class="asdf.extension.BuiltinExtension", + software=asdf.tags.core.Software(name="asdf", version="2.15.1"), + ), + ], + }, + } + with assert_no_warnings(): + af._check_extensions(tree) + + @pytest.mark.parametrize( ("array_inline_threshold", "inline_blocks", "internal_blocks"), [ @@ -413,24 +431,6 @@ def test_array_inline_threshold_string_array(array_inline_threshold, inline_bloc assert len(af._blocks.blocks) == internal_blocks -def test_resolver_deprecations(): - for resolver_method in [ - resolver.default_resolver, - resolver.default_tag_to_url_mapping, - resolver.default_url_mapping, - ]: - with pytest.warns(AsdfDeprecationWarning): - resolver_method("foo") - - -def test_get_default_resolver(): - resolver = _legacy_extension.get_default_resolver() - - result = resolver("tag:stsci.edu:asdf/core/ndarray-1.0.0") - - assert result == "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" - - def test_history_entries(tmp_path): path = str(tmp_path / "test.asdf") message = "Twas brillig, and the slithy toves" diff --git a/asdf/_tests/test_asdf.py b/asdf/_tests/test_asdf.py index 88366642b..d41e2b31a 100644 --- a/asdf/_tests/test_asdf.py +++ b/asdf/_tests/test_asdf.py @@ -9,7 +9,6 @@ from asdf.entry_points import get_extensions from asdf.exceptions import AsdfWarning from asdf.extension import ExtensionProxy -from asdf.extension._legacy import AsdfExtensionList from asdf.versioning import AsdfVersion @@ -117,7 +116,7 @@ def test_asdf_file_extensions(): extension = TestExtension(extension_uri="asdf://somewhere.org/extensions/foo-1.0") - for arg in ([extension], extension, AsdfExtensionList([extension])): + for arg in ([extension], extension): af = AsdfFile(extensions=arg) assert af.extensions == [ExtensionProxy(extension)] @@ -178,7 +177,7 @@ def test_open_asdf_extensions(tmp_path): with open_asdf(path) as af: assert af.extensions == [] - for arg in ([extension], extension, AsdfExtensionList([extension])): + for arg in ([extension], extension): with open_asdf(path, extensions=arg) as af: assert af.extensions == [ExtensionProxy(extension)] @@ -234,17 +233,6 @@ def test_reading_extension_metadata(): with assert_no_warnings(): open_asdf(buff) - # Test legacy extension matching by actual class name: - content = """ - history: - extensions: - - !core/extension_metadata-1.0.0 - extension_class: asdf._tests.test_asdf.TestExtension - """ - buff = yaml_to_asdf(content) - with assert_no_warnings(): - open_asdf(buff) - # Test matching by URI: content = """ history: diff --git a/asdf/_tests/test_config.py b/asdf/_tests/test_config.py index 83b85d32e..0b90ec14a 100644 --- a/asdf/_tests/test_config.py +++ b/asdf/_tests/test_config.py @@ -7,7 +7,6 @@ from asdf import get_config from asdf.core._integration import get_json_schema_resource_mappings from asdf.extension import ExtensionProxy -from asdf.extension._legacy import BuiltinExtension from asdf.resource import ResourceMappingProxy @@ -255,14 +254,6 @@ def test_resource_manager(): def test_extensions(): with asdf.config_context() as config: original_extensions = config.extensions - assert any(isinstance(e.delegate, BuiltinExtension) for e in original_extensions) - - class FooExtension: - types = [] - tag_mapping = [] - url_mapping = [] - - new_extension = FooExtension() class BarExtension: extension_uri = "asdf://somewhere.org/extensions/bar-1.0" @@ -273,25 +264,25 @@ class BarExtension: uri_extension = BarExtension() # Add an extension: - 
config.add_extension(new_extension) + config.add_extension(uri_extension) assert len(config.extensions) == len(original_extensions) + 1 - assert any(e for e in config.extensions if e.delegate is new_extension) + assert any(e for e in config.extensions if e.delegate is uri_extension) # Adding an extension should be idempotent: - config.add_extension(new_extension) + config.add_extension(uri_extension) assert len(config.extensions) == len(original_extensions) + 1 # Even when wrapped: - config.add_extension(ExtensionProxy(new_extension)) + config.add_extension(ExtensionProxy(uri_extension)) assert len(config.extensions) == len(original_extensions) + 1 # Remove an extension: - config.remove_extension(new_extension) + config.remove_extension(uri_extension) assert len(config.extensions) == len(original_extensions) # Removing should work when wrapped: - config.add_extension(new_extension) - config.remove_extension(ExtensionProxy(new_extension)) + config.add_extension(uri_extension) + config.remove_extension(ExtensionProxy(uri_extension)) assert len(config.extensions) == len(original_extensions) # And also by URI: @@ -305,14 +296,13 @@ class BarExtension: assert len(config.extensions) == len(original_extensions) # Remove by the name of the extension's package: - config.add_extension(ExtensionProxy(new_extension, package_name="foo")) config.add_extension(ExtensionProxy(uri_extension, package_name="foo")) config.remove_extension(package="foo") assert len(config.extensions) == len(original_extensions) # Can combine remove filters: - config.add_extension(ExtensionProxy(new_extension, package_name="foo")) config.add_extension(ExtensionProxy(uri_extension, package_name="foo")) + config.add_extension(ExtensionProxy(uri_extension, package_name="bar")) config.remove_extension(uri_extension.extension_uri, package="foo") assert len(config.extensions) == len(original_extensions) + 1 @@ -321,14 +311,13 @@ class BarExtension: config.remove_extension() # Removing an extension should be idempotent: - config.add_extension(new_extension) - config.remove_extension(new_extension) - config.remove_extension(new_extension) + config.add_extension(uri_extension) + config.remove_extension(uri_extension) + config.remove_extension(uri_extension) assert len(config.extensions) == len(original_extensions) # Resetting should get rid of any additions: - config.add_extension(new_extension) - config.add_extension(FooExtension()) + config.add_extension(uri_extension) config.reset_extensions() assert len(config.extensions) == len(original_extensions) diff --git a/asdf/_tests/test_deprecated.py b/asdf/_tests/test_deprecated.py index 2e0623894..fa5ee6016 100644 --- a/asdf/_tests/test_deprecated.py +++ b/asdf/_tests/test_deprecated.py @@ -2,35 +2,9 @@ import pytest -import asdf -import asdf._types -import asdf.extension -import asdf.testing.helpers -from asdf._tests._helpers import assert_extension_correctness -from asdf._tests.objects import CustomExtension -from asdf._types import CustomType from asdf.exceptions import AsdfDeprecationWarning -def test_custom_type_warning(): - with pytest.warns(AsdfDeprecationWarning, match=r"^.* subclasses the deprecated CustomType .*$"): - - class NewCustomType(CustomType): - pass - - -def test_assert_extension_correctness_deprecation(): - extension = CustomExtension() - with pytest.warns(AsdfDeprecationWarning, match="assert_extension_correctness is deprecated.*"): - assert_extension_correctness(extension) - - -def test_asdf_type_format_tag(): - with pytest.warns(AsdfDeprecationWarning, 
match="asdf.types.format_tag is deprecated"): - asdf._types.format_tag - asdf.testing.helpers.format_tag - - def test_asdf_stream_deprecation(): with pytest.warns(AsdfDeprecationWarning, match="asdf.stream is deprecated"): if "asdf.stream" in sys.modules: diff --git a/asdf/_tests/test_extension.py b/asdf/_tests/test_extension.py index 821760d7a..1c351d326 100644 --- a/asdf/_tests/test_extension.py +++ b/asdf/_tests/test_extension.py @@ -1,11 +1,11 @@ +import fractions + import pytest from packaging.specifiers import SpecifierSet from yaml.representer import RepresenterError from asdf import AsdfFile, config_context -from asdf._tests._helpers import assert_extension_correctness -from asdf._types import CustomType -from asdf.exceptions import AsdfDeprecationWarning, AsdfWarning, ValidationError +from asdf.exceptions import AsdfWarning, ValidationError from asdf.extension import ( Compressor, Converter, @@ -18,30 +18,9 @@ Validator, get_cached_extension_manager, ) -from asdf.extension._legacy import BuiltinExtension, _AsdfExtension, get_cached_asdf_extension_list from asdf.testing.helpers import roundtrip_object -def test_builtin_extension(): - extension = BuiltinExtension() - with pytest.warns(AsdfDeprecationWarning, match="assert_extension_correctness is deprecated.*"): - assert_extension_correctness(extension) - - -with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class LegacyType(dict, CustomType): - organization = "somewhere.org" - name = "test" - version = "1.0.0" - - -class LegacyExtension: - types = [LegacyType] - tag_mapping = [("tag:somewhere.org/", "http://somewhere.org/{tag_suffix}")] - url_mapping = [("http://somewhere.org/", "http://somewhere.org/{url_suffix}.yaml")] - - class MinimumExtension: extension_uri = "asdf://somewhere.org/extensions/minimum-1.0" @@ -178,7 +157,6 @@ def test_extension_proxy(): proxy = ExtensionProxy(extension) assert isinstance(proxy, Extension) - assert isinstance(proxy, _AsdfExtension) assert proxy.extension_uri == "asdf://somewhere.org/extensions/minimum-1.0" assert proxy.legacy_class_names == set() @@ -321,25 +299,6 @@ def test_extension_proxy_tags(): assert proxy.converters[0].tags == [foo_tag_uri] -def test_extension_proxy_legacy(): - extension = LegacyExtension() - proxy = ExtensionProxy(extension, package_name="foo", package_version="1.2.3") - - assert proxy.extension_uri is None - assert proxy.legacy_class_names == {"asdf._tests.test_extension.LegacyExtension"} - assert proxy.asdf_standard_requirement == SpecifierSet() - assert proxy.converters == [] - assert proxy.tags == [] - assert proxy.types == [LegacyType] - assert proxy.tag_mapping == LegacyExtension.tag_mapping - assert proxy.url_mapping == LegacyExtension.url_mapping - assert proxy.delegate is extension - assert proxy.legacy is True - assert proxy.package_name == "foo" - assert proxy.package_version == "1.2.3" - assert proxy.class_name == "asdf._tests.test_extension.LegacyExtension" - - def test_extension_proxy_hash_and_eq(): extension = MinimumExtension() proxy1 = ExtensionProxy(extension) @@ -362,11 +321,6 @@ def test_extension_proxy_repr(): assert "package: (none)" in repr(proxy) assert "legacy: False" in repr(proxy) - proxy = ExtensionProxy(LegacyExtension(), package_name="foo", package_version="1.2.3") - assert "class: asdf._tests.test_extension.LegacyExtension" in repr(proxy) - assert "package: foo==1.2.3" in repr(proxy) - assert "legacy: True" in repr(proxy) - def test_extension_manager(): converter1 = FullConverter( @@ -625,13 
+579,6 @@ class FooExtension(Extension): roundtrip_object(tree) -def test_get_cached_asdf_extension_list(): - extension = LegacyExtension() - extension_list = get_cached_asdf_extension_list([extension]) - assert get_cached_asdf_extension_list([extension]) is extension_list - assert get_cached_asdf_extension_list([LegacyExtension()]) is not extension_list - - def test_manifest_extension(): with config_context() as config: minimal_manifest = """%YAML 1.1 @@ -948,3 +895,49 @@ def from_yaml_tree(self, node, tag, ctx): with config_context() as config: with ctx_type(exception_class, match="Converter handles multiple tags"): config.add_extension(extension) + + +def test_reference_cycle(): + class FractionWithInverse(fractions.Fraction): + def __init__(self, *args, **kwargs): + self._inverse = None + + @property + def inverse(self): + return self._inverse + + @inverse.setter + def inverse(self, value): + self._inverse = value + + class FractionWithInverseConverter: + tags = ["asdf://example.com/fractions/tags/fraction-1.0.0"] + types = [FractionWithInverse] + + def to_yaml_tree(self, obj, tag, ctx): + return { + "numerator": obj.numerator, + "denominator": obj.denominator, + "inverse": obj.inverse, + } + + def from_yaml_tree(self, node, tag, ctx): + obj = FractionWithInverse(node["numerator"], node["denominator"]) + yield obj + obj.inverse = node["inverse"] + + class FractionWithInverseExtension: + tags = FractionWithInverseConverter.tags + converters = [FractionWithInverseConverter()] + extension_uri = "asdf://example.com/fractions/extensions/fraction-1.0.0" + + with config_context() as cfg: + cfg.add_extension(FractionWithInverseExtension()) + + f1 = FractionWithInverse(3, 5) + f2 = FractionWithInverse(5, 3) + f1.inverse = f2 + f2.inverse = f1 + + read_f1 = roundtrip_object(f1) + assert read_f1.inverse.inverse is read_f1 diff --git a/asdf/_tests/test_helpers.py b/asdf/_tests/test_helpers.py deleted file mode 100644 index 8c02ff6ec..000000000 --- a/asdf/_tests/test_helpers.py +++ /dev/null @@ -1,50 +0,0 @@ -import pytest - -from asdf import _types as types -from asdf._tests._helpers import assert_roundtrip_tree -from asdf.exceptions import AsdfConversionWarning, AsdfDeprecationWarning, AsdfWarning - - -def test_conversion_error(tmp_path): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class FooType(types.CustomType): - name = "foo" - - def __init__(self, a, b): - self.a = a - self.b = b - - @classmethod - def from_tree(cls, tree, ctx): - msg = "This allows us to test the failure" - raise TypeError(msg) - - @classmethod - def to_tree(cls, node, ctx): - return {"a": node.a, "b": node.b} - - def __eq__(self, other): - return self.a == other.a and self.b == other.b - - class FooExtension: - @property - def types(self): - return [FooType] - - @property - def tag_mapping(self): - return [] - - @property - def url_mapping(self): - return [] - - foo = FooType(10, "hello") - tree = {"foo": foo} - - with pytest.raises( - AsdfConversionWarning, - match=r"Failed to convert .* to custom type .* Using raw Python data structure instead", - ), pytest.warns(AsdfWarning, match=r"Unable to locate schema file"): - assert_roundtrip_tree(tree, tmp_path, extensions=FooExtension()) diff --git a/asdf/_tests/test_history.py b/asdf/_tests/test_history.py index ecbb6a5ee..2f1120717 100644 --- a/asdf/_tests/test_history.py +++ b/asdf/_tests/test_history.py @@ -6,7 +6,7 @@ import asdf from asdf.exceptions import AsdfWarning, ValidationError -from asdf.extension import 
Converter, Extension +from asdf.extension import Converter, Extension, ExtensionProxy from asdf.tags.core import HistoryEntry from asdf.testing import helpers @@ -131,30 +131,41 @@ def test_missing_extension_warning(): def test_extension_version_warning(): - yaml = """ + uri = "asdf://somewhere.org/extensions/foo-1.0.0" + package_name = "foo" + file_package_version = "2.0.0" + installed_package_version = "1.0.0" + + class FooExtension: + extension_uri = uri + + yaml = f""" history: extensions: - !core/extension_metadata-1.0.0 - extension_class: asdf.extension.BuiltinExtension + extension_class: {FooExtension.__qualname__} + extension_uri: {uri} software: !core/software-1.0.0 - name: asdf - version: 100.0.3 + name: {package_name} + version: {file_package_version} """ buff = helpers.yaml_to_asdf(yaml) - with pytest.warns( - AsdfWarning, - match=r"File was created with extension class 'asdf.extension.BuiltinExtension'", - ), asdf.open(buff): - pass + with asdf.config_context() as cfg: + cfg.add_extension(ExtensionProxy(FooExtension(), package_name, installed_package_version)) + with pytest.warns( + AsdfWarning, + match=f"older package \\({package_name}=={installed_package_version}\\)", + ), asdf.open(buff): + pass - buff.seek(0) + buff.seek(0) - # Make sure suppressing the warning works too - with warnings.catch_warnings(): - warnings.simplefilter("error") - with asdf.open(buff, ignore_missing_extensions=True): - pass + # Make sure suppressing the warning works too + with warnings.catch_warnings(): + warnings.simplefilter("error") + with asdf.open(buff, ignore_missing_extensions=True): + pass def test_strict_extension_check(): diff --git a/asdf/_tests/test_resolver.py b/asdf/_tests/test_resolver.py deleted file mode 100644 index 93da921d7..000000000 --- a/asdf/_tests/test_resolver.py +++ /dev/null @@ -1,116 +0,0 @@ -import pytest - -from asdf._resolver import Resolver, ResolverChain -from asdf.exceptions import AsdfDeprecationWarning - - -def test_resolver_no_mappings(): - r = Resolver([], "test") - assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore" - - -def test_resolver_tuple_mapping(): - r = Resolver([("united_states:", "earth:{test}")], "test") - assert r("united_states:maryland:baltimore") == "earth:united_states:maryland:baltimore" - - r = Resolver([("united_states:", "{test_prefix}texas:houston")], "test") - assert r("united_states:maryland:baltimore") == "united_states:texas:houston" - - r = Resolver([("united_states:", "{test_suffix}:hampden")], "test") - assert r("united_states:maryland:baltimore") == "maryland:baltimore:hampden" - - -def test_resolver_callable_mapping(): - r = Resolver([lambda inp: "nowhere"], "test") - assert r("united_states:maryland:baltimore") == "nowhere" - - -def test_resolver_multiple_mappings(): - r = Resolver( - [ - ("united_states:", "unknown_region:{test_suffix}"), - ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}"), - ], - "test", - ) - # Should choose the mapping with the longest matched prefix: - assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore" - - r = Resolver( - [ - ("united_states:", "unknown_region:{test_suffix}"), - lambda inp: "nowhere", - ("united_states:maryland:", "mid_atlantic:maryland:{test_suffix}"), - ], - "test", - ) - # Should prioritize the mapping offered by the callable: - assert r("united_states:maryland:baltimore") == "nowhere" - - r = Resolver( - [ - ("united_states:", "unknown_region:{test_suffix}"), - lambda inp: None, - ("united_states:maryland:", 
"mid_atlantic:maryland:{test_suffix}"), - ], - "test", - ) - # None from the callable is a signal that it can't handle the input, - # so we should fall back to the longest matched prefix: - assert r("united_states:maryland:baltimore") == "mid_atlantic:maryland:baltimore" - - -def test_resolver_non_prefix(): - r = Resolver([("maryland:", "shouldn't happen")], "test") - assert r("united_states:maryland:baltimore") == "united_states:maryland:baltimore" - - -def test_resolver_invalid_mapping(): - with pytest.raises(ValueError, match=r"Invalid mapping .*"): - Resolver([("foo",)], "test") - - with pytest.raises(ValueError, match=r"Invalid mapping .*"): - Resolver([12], "test") - - -def test_resolver_hash_and_equals(): - r1 = Resolver([("united_states:", "earth:{test}")], "test") - r2 = Resolver([("united_states:", "earth:{test}")], "test") - r3 = Resolver([("united_states:", "{test}:hampden")], "test") - - assert hash(r1) == hash(r2) - assert r1 == r2 - - assert hash(r1) != hash(r3) - assert r1 != r3 - - -def test_resolver_add_mapping_deprecated(): - r = Resolver([], "test") - with pytest.warns(AsdfDeprecationWarning): - r.add_mapping([("united_states:", "earth:{test}")], "test") - - -def test_resolver_chain(): - r1 = Resolver([("maryland:", "united_states:{test}")], "test") - r2 = Resolver([("united_states:", "earth:{test}")], "test") - - chain = ResolverChain(r1, r2) - - assert chain("maryland:baltimore") == "earth:united_states:maryland:baltimore" - - -def test_resolver_chain_hash_and_equals(): - r1 = Resolver([("united_states:", "earth:{test}")], "test") - r2 = Resolver([("united_states:", "earth:{test}")], "test") - r3 = Resolver([("united_states:", "{test}:hampden")], "test") - - c1 = ResolverChain(r1, r3) - c2 = ResolverChain(r2, r3) - c3 = ResolverChain(r1, r2) - - assert hash(c1) == hash(c2) - assert c1 == c2 - - assert hash(c1) != hash(c3) - assert c1 != c3 diff --git a/asdf/_tests/test_schema.py b/asdf/_tests/test_schema.py index fd21af032..7496d5bbb 100644 --- a/asdf/_tests/test_schema.py +++ b/asdf/_tests/test_schema.py @@ -1,3 +1,4 @@ +import contextlib import io from datetime import datetime @@ -7,33 +8,59 @@ import asdf import asdf.testing.helpers -from asdf import _resolver as resolver -from asdf import _types as types from asdf import config_context, constants, get_config, schema, tagged, util, yamlutil from asdf._tests import _helpers as helpers -from asdf._tests.objects import CustomExtension from asdf.exceptions import AsdfConversionWarning, AsdfDeprecationWarning, AsdfWarning, ValidationError -from asdf.extension import _legacy as _legacy_extension +from asdf.extension import TagDefinition -with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - class TagReferenceType(types.CustomType): - """ - This class is used by several tests below for validating foreign type - references in schemas and ASDF files. 
- """ +@contextlib.contextmanager +def tag_reference_extension(): + class TagReference: + def __init__(self, name, things): + self.name = name + self.things = things + + tag_uri = "tag:nowhere.org:custom/tag_reference-1.0.0" + schema_uri = "http://nowhere.org/schemas/custom/tag_reference-1.0.0" + tag_def = asdf.extension.TagDefinition(tag_uri, schema_uris=schema_uri) - name = "tag_reference" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + class TagReferenceConverter: + tags = [tag_uri] + types = [TagReference] - @classmethod - def from_tree(cls, tree, ctx): - node = {} - node["name"] = tree["name"] - node["things"] = tree["things"] - return node + def to_yaml_tree(self, obj, tag, ctx): + return {"name": obj.name, "things": obj.things} + + def from_yaml_tree(self, node, tag, ctx): + return TagReference(node["name"], node["things"]) + + class TagReferenceExtension: + tags = [tag_def] + extension_uri = "asdf://nowhere.org/extensions/tag_reference-1.0.0" + converters = [TagReferenceConverter()] + + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: {schema_uri} +title: An example custom type for testing tag references + +type: object +properties: + name: + type: string + things: + $ref: "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0" +required: [name, things] +... + """ + + with config_context() as cfg: + cfg.add_resource_mapping({schema_uri: tag_schema}) + cfg.add_extension(TagReferenceExtension()) + yield def test_tagging_scalars(): @@ -108,11 +135,12 @@ def test_load_schema_with_full_tag(tmp_path): schema_path = tmp_path / "nugatory.yaml" schema_path.write_bytes(schema_def.encode()) - schema_tree = schema.load_schema(str(schema_path), resolve_references=True) + with pytest.warns(AsdfDeprecationWarning, match="Resolving by tag is deprecated"): + schema_tree = schema.load_schema(str(schema_path), resolve_references=True) schema.check_schema(schema_tree) -def test_load_schema_with_tag_address(tmp_path): +def test_load_schema_with_file_url(tmp_path): schema_def = """ %YAML 1.1 %TAG !asdf! tag:stsci.edu:asdf/ @@ -136,32 +164,6 @@ def test_load_schema_with_tag_address(tmp_path): schema.check_schema(schema_tree) -def test_load_schema_with_file_url(tmp_path): - schema_def = """ -%YAML 1.1 -%TAG !asdf! tag:stsci.edu:asdf/ ---- -$schema: "http://stsci.edu/schemas/asdf/asdf-schema-1.0.0" -id: "http://stsci.edu/schemas/asdf/nugatory/nugatory-1.0.0" -tag: "tag:stsci.edu:asdf/nugatory/nugatory-1.0.0" - -type: object -properties: - foobar: - $ref: "{}" - -required: [foobar] -... - """.format( - _legacy_extension.get_default_resolver()("tag:stsci.edu:asdf/core/ndarray-1.0.0"), - ) - schema_path = tmp_path / "nugatory.yaml" - schema_path.write_bytes(schema_def.encode()) - - schema_tree = schema.load_schema(str(schema_path), resolve_references=True) - schema.check_schema(schema_tree) - - def test_load_schema_with_asdf_uri_scheme(): subschema_content = """%YAML 1.1 --- @@ -257,16 +259,6 @@ def test_schema_caching(): assert s1 is not s2 -def test_asdf_file_resolver_hashing(): - # Confirm that resolvers from distinct AsdfFile instances - # hash to the same value (this allows schema caching to function). 
- a1 = asdf.AsdfFile() - a2 = asdf.AsdfFile() - - assert hash(a1._resolver) == hash(a2._resolver) - assert a1._resolver == a2._resolver - - def test_load_schema_from_resource_mapping(): content = b""" id: http://somewhere.org/schemas/razmataz-1.0.0 @@ -286,49 +278,100 @@ def test_load_schema_from_resource_mapping(): def test_flow_style(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): + class CustomFlow: + def __init__(self, a, b): + self.a = a + self.b = b - class CustomFlowStyleType(dict, types.CustomType): - name = "custom_flow" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + tag_uri = "http://nowhere.org/tags/custom/custom_flow-1.0.0" - class CustomFlowStyleExtension(CustomExtension): - @property - def types(self): - return [CustomFlowStyleType] + class CustomFlowConverter: + tags = [tag_uri] + types = [CustomFlow] - tree = {"custom_flow": CustomFlowStyleType({"a": 42, "b": 43})} + def to_yaml_tree(self, obj, tag, ctx): + return {"a": obj.a, "b": obj.b} - buff = io.BytesIO() - ff = asdf.AsdfFile(tree, extensions=CustomFlowStyleExtension()) - ff.write_to(buff) + def from_yaml_tree(self, node, tag, ctx): + return CustomFlow(node["a"], node["b"]) - assert b" a: 42\n b: 43" in buff.getvalue() + schema_uri = "http://nowhere.org/schemas/custom/custom_flow-1.0.0" + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: "{schema_uri}" +type: object +properties: + a: + type: number + b: + type: number +flowStyle: block + """ + + tag_def = TagDefinition(tag_uri, schema_uris=[schema_uri]) + + class CustomFlowExtension: + extension_uri = "http://nowhere.org/extensions/custom/custom_flow-1.0.0" + tags = [tag_def] + converters = [CustomFlowConverter()] + + with config_context() as cfg: + cfg.add_extension(CustomFlowExtension()) + cfg.add_resource_mapping({schema_uri: tag_schema}) + buff = io.BytesIO() + ff = asdf.AsdfFile({"custom_flow": CustomFlow(42, 43)}) + ff.write_to(buff) + + assert b" a: 42\n b: 43" in buff.getvalue() def test_style(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): + class CustomStyle: + def __init__(self, message): + self.message = message - class CustomStyleType(str, types.CustomType): - name = "custom_style" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + tag_uri = "http://nowhere.org/tags/custom/custom_style-1.0.0" - class CustomStyleExtension(CustomExtension): - @property - def types(self): - return [CustomStyleType] + class CustomStyleConverter: + tags = [tag_uri] + types = [CustomStyle] - tree = {"custom_style": CustomStyleType("short")} + def to_yaml_tree(self, obj, tag, ctx): + return obj.message - buff = io.BytesIO() - ff = asdf.AsdfFile(tree, extensions=CustomStyleExtension()) - ff.write_to(buff) + def from_yaml_tree(self, node, tag, ctx): + return CustomStyle(node) + + schema_uri = "http://nowhere.org/schemas/custom/custom_style-1.0.0" + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: "{schema_uri}" +type: string +style: literal + """ + + tag_def = TagDefinition(tag_uri, schema_uris=[schema_uri]) - assert b"|-\n short\n" in buff.getvalue() + class CustomStyleExtension: + extension_uri = "http://nowhere.org/extensions/custom/custom_style-1.0.0" + tags = [tag_def] + converters = [CustomStyleConverter()] + + with config_context() as cfg: + cfg.add_extension(CustomStyleExtension()) + cfg.add_resource_mapping({schema_uri: 
tag_schema}) + + tree = {"custom_style": CustomStyle("short")} + + buff = io.BytesIO() + ff = asdf.AsdfFile(tree) + ff.write_to(buff) + + assert b"|-\n short\n" in buff.getvalue() def test_property_order(): @@ -350,51 +393,71 @@ def test_property_order(): def test_invalid_nested(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): + tag_uri = "http://nowhere.org/tags/custom/custom-1.0.0" + schema_uri = "http://nowhere.org/schemas/custom/custom-1.0.0" + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: "{schema_uri}" +type: integer +default: 42 + """ - class CustomType(str, types.CustomType): - name = "custom" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + class Custom: + def __init__(self, value): + self.value = value - class CustomTypeExtension(CustomExtension): - @property - def types(self): - return [CustomType] + class CustomConverter: + tags = [tag_uri] + types = [Custom] - yaml = """ -custom: ! + def to_yaml_tree(self, obj, tag, ctx): + return obj.value + + def from_yaml_tree(self, node, tag, ctx): + return Custom(node) + + tag_def = TagDefinition(tag_uri, schema_uris=[schema_uri]) + + class CustomExtension: + extension_uri = "http://nowhere.org/extensions/custom/custom-1.0.0" + tags = [tag_def] + converters = [CustomConverter()] + + yaml = f""" +custom: !<{tag_uri}> foo """ buff = helpers.yaml_to_asdf(yaml) # This should cause a warning but not an error because without explicitly # providing an extension, our custom type will not be recognized and will # simply be converted to a raw type. - with pytest.warns(AsdfConversionWarning, match=r"tag:nowhere.org:custom/custom-1.0.0"), asdf.open(buff): + with pytest.warns(AsdfConversionWarning, match=tag_uri), asdf.open(buff): pass buff.seek(0) - with pytest.raises(ValidationError, match=r".* is not of type .*"), asdf.open( - buff, - extensions=[CustomTypeExtension()], - ): - pass - - # Make sure tags get validated inside of other tags that know - # nothing about them. - yaml = """ -array: !core/ndarray-1.0.0 - data: [0, 1, 2] - custom: ! - foo - """ - buff = helpers.yaml_to_asdf(yaml) - with pytest.raises(ValidationError, match=r".* is not of type .*"), asdf.open( - buff, - extensions=[CustomTypeExtension()], - ): - pass + with config_context() as cfg: + cfg.add_extension(CustomExtension()) + cfg.add_resource_mapping({schema_uri: tag_schema}) + with pytest.raises(ValidationError, match=r".* is not of type .*"), asdf.open( + buff, + ): + pass + + # Make sure tags get validated inside of other tags that know + # nothing about them. 
+ yaml = f""" + array: !core/ndarray-1.0.0 + data: [0, 1, 2] + custom: !<{tag_uri}> + foo + """ + buff = helpers.yaml_to_asdf(yaml) + with pytest.raises(ValidationError, match=r".* is not of type .*"), asdf.open( + buff, + ): + pass def test_invalid_schema(): @@ -451,75 +514,145 @@ def test_check_complex_default(): def test_fill_and_remove_defaults(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): + tag_uri = "http://nowhere.org/tags/custom/default-1.0.0" + schema_uri = "http://nowhere.org/schemas/custom/default-1.0.0" + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: "{schema_uri}" +type: object +properties: + a: + type: integer + default: 42 + b: + type: object + properties: + c: + type: integer + default: 82 + d: + allOf: + - type: object + properties: + e: + type: integer + default: 122 + - type: object + properties: + f: + type: integer + default: 162 + g: + anyOf: + - type: object + properties: + h: + type: integer + default: 202 + - type: object + properties: + i: + type: integer + default: 242 + j: + oneOf: + - type: object + properties: + k: + type: integer + default: 282 + required: [k] + additionalProperties: false + - type: object + properties: + l: + type: integer + default: 322 + required: [l] + additionalProperties: false + """ - class DefaultType(dict, types.CustomType): - name = "default" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + class Default(dict): + pass - class DefaultTypeExtension(CustomExtension): - @property - def types(self): - return [DefaultType] + class DefaultConverter: + tags = [tag_uri] + types = [Default] - yaml = """ -custom: ! + def to_yaml_tree(self, obj, tag, ctx): + return dict(obj) + + def from_yaml_tree(self, node, tag, ctx): + return Default(**node) + + tag_def = TagDefinition(tag_uri, schema_uris=[schema_uri]) + + class DefaultExtension: + tags = [tag_def] + converters = [DefaultConverter()] + extension_uri = "http://nowhere.org/extensions/custom/default-1.0.0" + + with config_context() as cfg: + cfg.add_extension(DefaultExtension()) + cfg.add_resource_mapping({schema_uri: tag_schema}) + yaml = """ +custom: ! 
b: {} d: {} g: {} j: l: 362 - """ - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: - assert "a" in ff.tree["custom"] - assert ff.tree["custom"]["a"] == 42 - assert ff.tree["custom"]["b"]["c"] == 82 - # allOf combiner should fill defaults from all subschemas: - assert ff.tree["custom"]["d"]["e"] == 122 - assert ff.tree["custom"]["d"]["f"] == 162 - # anyOf combiners should be ignored: - assert "h" not in ff.tree["custom"]["g"] - assert "i" not in ff.tree["custom"]["g"] - # oneOf combiners should be ignored: - assert "k" not in ff.tree["custom"]["j"] - assert ff.tree["custom"]["j"]["l"] == 362 - - buff.seek(0) - with config_context() as config: - config.legacy_fill_schema_defaults = False - with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: - assert "a" not in ff.tree["custom"] - assert "c" not in ff.tree["custom"]["b"] - assert "e" not in ff.tree["custom"]["d"] - assert "f" not in ff.tree["custom"]["d"] - assert "h" not in ff.tree["custom"]["g"] - assert "i" not in ff.tree["custom"]["g"] - assert "k" not in ff.tree["custom"]["j"] - assert ff.tree["custom"]["j"]["l"] == 362 - ff.fill_defaults() + """ + buff = helpers.yaml_to_asdf(yaml) + with asdf.open(buff) as ff: assert "a" in ff.tree["custom"] assert ff.tree["custom"]["a"] == 42 - assert "c" in ff.tree["custom"]["b"] - assert ff.tree["custom"]["b"]["c"] == 82 assert ff.tree["custom"]["b"]["c"] == 82 + # allOf combiner should fill defaults from all subschemas: assert ff.tree["custom"]["d"]["e"] == 122 assert ff.tree["custom"]["d"]["f"] == 162 + # anyOf combiners should be ignored: assert "h" not in ff.tree["custom"]["g"] assert "i" not in ff.tree["custom"]["g"] + # oneOf combiners should be ignored: assert "k" not in ff.tree["custom"]["j"] assert ff.tree["custom"]["j"]["l"] == 362 - ff.remove_defaults() - assert "a" not in ff.tree["custom"] - assert "c" not in ff.tree["custom"]["b"] - assert "e" not in ff.tree["custom"]["d"] - assert "f" not in ff.tree["custom"]["d"] - assert "h" not in ff.tree["custom"]["g"] - assert "i" not in ff.tree["custom"]["g"] - assert "k" not in ff.tree["custom"]["j"] - assert ff.tree["custom"]["j"]["l"] == 362 + + buff.seek(0) + with config_context() as config: + config.legacy_fill_schema_defaults = False + with asdf.open(buff) as ff: + assert "a" not in ff.tree["custom"] + assert "c" not in ff.tree["custom"]["b"] + assert "e" not in ff.tree["custom"]["d"] + assert "f" not in ff.tree["custom"]["d"] + assert "h" not in ff.tree["custom"]["g"] + assert "i" not in ff.tree["custom"]["g"] + assert "k" not in ff.tree["custom"]["j"] + assert ff.tree["custom"]["j"]["l"] == 362 + ff.fill_defaults() + assert "a" in ff.tree["custom"] + assert ff.tree["custom"]["a"] == 42 + assert "c" in ff.tree["custom"]["b"] + assert ff.tree["custom"]["b"]["c"] == 82 + assert ff.tree["custom"]["b"]["c"] == 82 + assert ff.tree["custom"]["d"]["e"] == 122 + assert ff.tree["custom"]["d"]["f"] == 162 + assert "h" not in ff.tree["custom"]["g"] + assert "i" not in ff.tree["custom"]["g"] + assert "k" not in ff.tree["custom"]["j"] + assert ff.tree["custom"]["j"]["l"] == 362 + ff.remove_defaults() + assert "a" not in ff.tree["custom"] + assert "c" not in ff.tree["custom"]["b"] + assert "e" not in ff.tree["custom"]["d"] + assert "f" not in ff.tree["custom"]["d"] + assert "h" not in ff.tree["custom"]["g"] + assert "i" not in ff.tree["custom"]["g"] + assert "k" not in ff.tree["custom"]["j"] + assert ff.tree["custom"]["j"]["l"] == 362 def test_one_of(): @@ -527,34 +660,69 @@ def 
test_one_of(): Covers https://github.com/asdf-format/asdf/issues/809 """ - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): + class OneOf: + def __init__(self, value): + self.value = value - class OneOfType(dict, types.CustomType): - name = "one_of" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" + tag_uri = "http://nowhere.org/custom/one_of-1.0.0" - class OneOfTypeExtension(CustomExtension): - @property - def types(self): - return [OneOfType] + class OneOfConverter: + tags = [tag_uri] + types = [OneOf] - yaml = """ -one_of: ! + def to_yaml_tree(self, obj, tag, ctx): + return {"value": obj.value} + + def from_yaml_tree(self, node, tag, ctx): + return OneOf(node["value"]) + + schema_uri = "http://nowhere.org/schemas/custom/one_of-1.0.0" + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: "{schema_uri}" +title: | + oneOf test schema +oneOf: + - type: object + properties: + value: + type: number + required: [value] + additionalProperties: false + + - type: object + properties: + value: + type: string + required: [value] + additionalProperties: false +... + """ + + tag_def = TagDefinition(tag_uri, schema_uris=[schema_uri]) + + class OneOfExtension: + extension_uri = "http://nowhere.org/extensions/custom/one_of-1.0.0" + tags = [tag_def] + converters = [OneOfConverter()] + + yaml = f""" +one_of: !<{tag_uri}> value: foo """ - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=[OneOfTypeExtension()]) as ff: - assert ff["one_of"]["value"] == "foo" + with config_context() as cfg: + cfg.add_extension(OneOfExtension()) + cfg.add_resource_mapping({schema_uri: tag_schema}) + + buff = helpers.yaml_to_asdf(yaml) + with asdf.open(buff) as ff: + assert ff["one_of"].value == "foo" -def test_tag_reference_validation(): - class DefaultTypeExtension(CustomExtension): - @property - def types(self): - return [TagReferenceType] +def test_tag_reference_validation(): yaml = """ custom: ! 
name: @@ -563,33 +731,53 @@ def types(self): data: [1, 2, 3] """ - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: - custom = ff.tree["custom"] - assert custom["name"] == "Something" - assert_array_equal(custom["things"], [1, 2, 3]) + with tag_reference_extension(): + buff = helpers.yaml_to_asdf(yaml) + with asdf.open(buff) as ff: + custom = ff.tree["custom"] + assert custom.name == "Something" + assert_array_equal(custom.things, [1, 2, 3]) def test_foreign_tag_reference_validation(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class ForeignTagReferenceType(types.CustomType): - name = "foreign_tag_reference" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" - - @classmethod - def from_tree(cls, tree, ctx): - node = {} - node["a"] = tree["a"] - node["b"] = tree["b"] - return node - - class ForeignTypeExtension(CustomExtension): - @property - def types(self): - return [TagReferenceType, ForeignTagReferenceType] + class ForeignTagReference: + def __init__(self, a): + self.a = a + + tag_uri = "tag:nowhere.org:custom/foreign_tag_reference-1.0.0" + schema_uri = "http://nowhere.org/schemas/custom/foreign_tag_reference-1.0.0" + tag_def = asdf.extension.TagDefinition(tag_uri, schema_uris=schema_uri) + + class ForeignTagReferenceConverter: + tags = [tag_uri] + types = [ForeignTagReference] + + def to_yaml_tree(self, obj, tag, ctx): + return {"a": obj.a} + + def from_yaml_tree(self, node, tag, ctx): + return ForeignTagReference(node["a"]) + + class ForeignTagReferenceExtension: + tags = [tag_def] + extension_uri = "asdf://nowhere.org/extensions/foreign_tag_reference-1.0.0" + converters = [ForeignTagReferenceConverter()] + + tag_schema = f""" +%YAML 1.1 +--- +$schema: "http://stsci.edu/schemas/yaml-schema/draft-01" +id: {schema_uri} +title: An example custom type for testing tag references + +type: object +properties: + a: + # Test foreign tag reference using tag URI + $ref: "http://nowhere.org/schemas/custom/tag_reference-1.0.0" +required: [a] +... + """ yaml = """ custom: ! @@ -598,28 +786,23 @@ def types(self): "Something" things: !core/ndarray-1.0.0 data: [1, 2, 3] - b: ! 
- name: - "Anything" - things: !core/ndarray-1.0.0 - data: [4, 5, 6] """ - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=ForeignTypeExtension()) as ff: - a = ff.tree["custom"]["a"] - b = ff.tree["custom"]["b"] - assert a["name"] == "Something" - assert_array_equal(a["things"], [1, 2, 3]) - assert b["name"] == "Anything" - assert_array_equal(b["things"], [4, 5, 6]) + with tag_reference_extension(): + cfg = asdf.get_config() + cfg.add_resource_mapping({schema_uri: tag_schema}) + cfg.add_extension(ForeignTagReferenceExtension()) + + buff = helpers.yaml_to_asdf(yaml) + with asdf.open(buff) as ff: + a = ff.tree["custom"].a + assert a.name == "Something" + assert_array_equal(a.things, [1, 2, 3]) def test_self_reference_resolution(): - r = resolver.Resolver(CustomExtension().url_mapping, "url") s = schema.load_schema( helpers.get_test_data_path("self_referencing-1.0.0.yaml"), - resolver=r, resolve_references=True, ) assert "$ref" not in repr(s) @@ -628,11 +811,11 @@ def test_self_reference_resolution(): def test_schema_resolved_via_entry_points(): """Test that entry points mappings to core schema works""" - r = _legacy_extension.get_default_resolver() tag = asdf.testing.helpers.format_tag("stsci.edu", "asdf", "1.0.0", "fits/fits") - url = _legacy_extension.default_extensions.extension_list.tag_mapping(tag) - - s = schema.load_schema(url, resolver=r, resolve_references=True) + extension_manager = asdf.extension.get_cached_extension_manager(get_config().extensions) + schema_uris = extension_manager.get_tag_definition(tag).schema_uris + assert len(schema_uris) > 0 + s = schema.load_schema(schema_uris[0], resolve_references=True) assert tag in repr(s) @@ -806,67 +989,6 @@ def test_nested_array_yaml(tmp_path): schema.validate(b, schema=schema_tree) -def test_type_missing_dependencies(): - pytest.importorskip("astropy", "3.0.0") - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class MissingType(types.CustomType): - name = "missing" - organization = "nowhere.org" - version = (1, 1, 0) - standard = "custom" - types = ["asdfghjkl12345.foo"] - requires = ["ASDFGHJKL12345"] - - class DefaultTypeExtension(CustomExtension): - @property - def types(self): - return [MissingType] - - yaml = """ -custom: ! 
- b: {foo: 42} - """ - buff = helpers.yaml_to_asdf(yaml) - with pytest.warns( - AsdfConversionWarning, - match=r"Failed to convert tag:nowhere.org:custom/missing-1.1.0", - ), asdf.open(buff, extensions=[DefaultTypeExtension()]) as ff: - assert ff.tree["custom"]["b"]["foo"] == 42 - - -def test_assert_roundtrip_with_extension(tmp_path): - called_custom_assert_equal = [False] - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class CustomType(dict, types.CustomType): - name = "custom_flow" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" - - @classmethod - def assert_equal(cls, old, new): - called_custom_assert_equal[0] = True - - class CustomTypeExtension(CustomExtension): - @property - def types(self): - return [CustomType] - - tree = {"custom": CustomType({"a": 42, "b": 43})} - - def check(ff): - assert isinstance(ff.tree["custom"], CustomType) - - with helpers.assert_no_warnings(): - helpers.assert_roundtrip_tree(tree, tmp_path, extensions=[CustomTypeExtension()]) - - assert called_custom_assert_equal[0] is True - - def test_custom_validation_bad(tmp_path): custom_schema_path = helpers.get_test_data_path("custom_schema.yaml") asdf_file = str(tmp_path / "out.asdf") @@ -1020,44 +1142,6 @@ def test_custom_validation_with_external_ref_bad(tmp_path): pass -def test_nonexistent_tag(tmp_path): - """ - This tests the case where a node is tagged with a type that apparently - comes from an extension that is known, but the type itself can't be found. - - This could occur when a more recent version of an installed package - provides the new type, but an older version of the package is installed. - ASDF should still be able to open the file in this case, but it won't be - able to restore the type. - - The bug that prompted this test results from attempting to load a schema - file that doesn't exist, which is why this test belongs in this file. - """ - - # This shouldn't ever happen, but it's a useful test case - yaml = """ -a: !core/doesnt_exist-1.0.0 - hello - """ - - buff = helpers.yaml_to_asdf(yaml) - with pytest.warns(AsdfWarning, match=r"Unable to locate schema file"), asdf.open(buff) as af: - assert str(af["a"]) == "hello" - - # This is a more realistic case since we're using an external extension - yaml = """ -a: ! - hello - """ - - buff = helpers.yaml_to_asdf(yaml) - with pytest.warns(AsdfWarning, match=r"Unable to locate schema file"), asdf.open( - buff, - extensions=CustomExtension(), - ) as af: - assert str(af["a"]) == "hello" - - @pytest.mark.parametrize( ("numpy_value", "valid_types"), [ diff --git a/asdf/_tests/test_types.py b/asdf/_tests/test_types.py index add2d4fde..b20fdb43b 100644 --- a/asdf/_tests/test_types.py +++ b/asdf/_tests/test_types.py @@ -1,246 +1,9 @@ -import io -from fractions import Fraction - import pytest import asdf -from asdf import _types as types -from asdf import util, versioning -from asdf.exceptions import AsdfConversionWarning, AsdfDeprecationWarning, AsdfWarning -from asdf.extension import _legacy +from asdf.exceptions import AsdfConversionWarning from . 
import _helpers as helpers -from .objects import CustomExtension, CustomTestType - -TEST_DATA_PATH = str(helpers.get_test_data_path("")) - - -class Fractional2dCoord: - def __init__(self, x, y): - self.x = x - self.y = y - - -class FractionWithInverse(Fraction): - def __init__(self, *args, **kwargs): - self._inverse = None - - @property - def inverse(self): - return self._inverse - - @inverse.setter - def inverse(self, value): - self._inverse = value - - -with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class FractionWithInverseType(types.CustomType): - name = "fraction_with_inverse" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" - types = [FractionWithInverse] - - @classmethod - def to_tree(cls, node, ctx): - return {"numerator": node.numerator, "denominator": node.denominator, "inverse": node.inverse} - - @classmethod - def from_tree(cls, tree, ctx): - result = FractionWithInverse(tree["numerator"], tree["denominator"]) - yield result - result.inverse = tree["inverse"] - - -class FractionWithInverseExtension(CustomExtension): - @property - def types(self): - return [FractionWithInverseType] - - @property - def tag_mapping(self): - return [("tag:nowhere.org:custom", "http://nowhere.org/schemas/custom{tag_suffix}")] - - @property - def url_mapping(self): - return [("http://nowhere.org/schemas/custom/", util.filepath_to_url(TEST_DATA_PATH) + "/{url_suffix}.yaml")] - - -def fractiontype_factory(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class FractionType(types.CustomType): - name = "fraction" - organization = "nowhere.org" - version = (1, 0, 0) - standard = "custom" - types = [Fraction] - handle_dynamic_subclasses = True - - @classmethod - def to_tree(cls, node, ctx): - return [node.numerator, node.denominator] - - @classmethod - def from_tree(cls, tree, ctx): - return Fraction(tree[0], tree[1]) - - return FractionType - - -def fractional2dcoordtype_factory(): - FractionType = fractiontype_factory() - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class Fractional2dCoordType(types.CustomType): - name = "fractional_2d_coord" - organization = "nowhere.org" - standard = "custom" - version = (1, 0, 0) - types = [Fractional2dCoord] - - @classmethod - def to_tree(cls, node, ctx): - return {"x": node.x, "y": node.y} - - @classmethod - def from_tree(cls, tree, ctx): - return Fractional2dCoord(tree["x"], tree["y"]) - - class Fractional2dCoordExtension(CustomExtension): - @property - def types(self): - return [FractionType, Fractional2dCoordType] - - return FractionType, Fractional2dCoordType, Fractional2dCoordExtension - - -def test_custom_tag(): - FractionType = fractiontype_factory() - - class FractionExtension(CustomExtension): - @property - def types(self): - return [FractionType] - - class FractionCallable(FractionExtension): - @property - def tag_mapping(self): - def check(tag): - prefix = "tag:nowhere.org:custom" - if tag.startswith(prefix): - return "http://nowhere.org/schemas/custom" + tag[len(prefix) :] - - return None - - return [check] - - yaml = """ -a: ! 
- [2, 3] -b: !core/complex-1.0.0 - 0j - """ - - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=FractionExtension()) as ff: - assert ff.tree["a"] == Fraction(2, 3) - - buff = io.BytesIO() - ff.write_to(buff) - - buff = helpers.yaml_to_asdf(yaml) - with asdf.open(buff, extensions=FractionCallable()) as ff: - assert ff.tree["a"] == Fraction(2, 3) - - buff = io.BytesIO() - ff.write_to(buff) - buff.close() - - -def test_version_mismatch_with_supported_versions(): - """Make sure that defining the supported_versions field eliminates - the schema mismatch warning.""" - - class CustomFlow: - pass - - class CustomFlowType(CustomTestType): - version = "1.1.0" - supported_versions = ["1.0.0", "1.1.0"] - name = "custom_flow" - organization = "nowhere.org" - standard = "custom" - types = [CustomFlow] - - class CustomFlowExtension(CustomExtension): - @property - def types(self): - return [CustomFlowType] - - yaml = """ -flow_thing: - ! - c: 100 - d: 3.14 -""" - buff = helpers.yaml_to_asdf(yaml) - with helpers.assert_no_warnings(): - asdf.open(buff, ignore_version_mismatch=False, extensions=CustomFlowExtension()) - - -def test_longest_match(): - class FancyComplexExtension: - @property - def types(self): - return [] - - @property - def tag_mapping(self): - return [] - - @property - def url_mapping(self): - return [("http://stsci.edu/schemas/asdf/core/", "FOOBAR/{url_suffix}")] - - extension_list = _legacy.AsdfExtensionList([_legacy.BuiltinExtension(), FancyComplexExtension()]) - - assert extension_list.url_mapping("http://stsci.edu/schemas/asdf/core/asdf-1.0.0") == "FOOBAR/asdf-1.0.0" - assert ( - extension_list.url_mapping("http://stsci.edu/schemas/asdf/transform/transform-1.0.0") - != "FOOBAR/transform-1.0.0" - ) - - -def test_module_versioning(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class NoModuleType(types.CustomType): - # It seems highly unlikely that this would be a real module - requires = ["qkjvqdja"] - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class HasCorrectPytest(types.CustomType): - # This means it requires 1.0.0 or greater, so it should succeed - requires = ["pytest-1.0.0"] - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class DoesntHaveCorrectPytest(types.CustomType): - requires = ["pytest-91984.1.7"] - - nmt = NoModuleType() - hcp = HasCorrectPytest() - # perhaps an unfortunate acroynm - dhcp = DoesntHaveCorrectPytest() - - assert nmt.has_required_modules is False - assert hcp.has_required_modules is True - assert dhcp.has_required_modules is False def test_undefined_tag(): @@ -285,338 +48,3 @@ def test_undefined_tag(): buff.seek(0) with helpers.assert_no_warnings(): afile = asdf.open(buff, ignore_unrecognized_tag=True) - - -def test_newer_tag(): - """ - This test simulates a scenario where newer versions of CustomFlow - provides different keyword parameters that the older schema and tag class - do not account for. We want to test whether ASDF can handle this problem - gracefully and still provide meaningful data as output. The test case is - fairly contrived but we want to test whether ASDF can handle backwards - compatibility even when an explicit tag class for different versions of a - schema is not available. 
- """ - - class CustomFlow: - def __init__(self, c=None, d=None): - self.c = c - self.d = d - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class CustomFlowType(types.CustomType): - version = "1.1.0" - name = "custom_flow" - organization = "nowhere.org" - standard = "custom" - types = [CustomFlow] - - @classmethod - def from_tree(cls, tree, ctx): - kwargs = {} - for name in tree: - kwargs[name] = tree[name] - return CustomFlow(**kwargs) - - @classmethod - def to_tree(cls, data, ctx): - return {"c": data.c, "d": data.d} - - class CustomFlowExtension(CustomExtension): - @property - def types(self): - return [CustomFlowType] - - new_yaml = """ -flow_thing: - ! - c: 100 - d: 3.14 -""" - new_buff = helpers.yaml_to_asdf(new_yaml) - new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) - assert type(new_data.tree["flow_thing"]) == CustomFlow - - old_yaml = """ -flow_thing: - ! - a: 100 - b: 3.14 -""" - old_buff = helpers.yaml_to_asdf(old_yaml) - # We expect this warning since it will not be possible to convert version - # 1.0.0 of CustomFlow to a CustomType (by design, for testing purposes). - with pytest.warns(AsdfConversionWarning, match=r"Failed to convert tag:nowhere.org:custom/custom_flow-1.0.0"): - asdf.open(old_buff, extensions=CustomFlowExtension()) - - -def test_incompatible_version_check(): - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType0(types.CustomType): - supported_versions = versioning.AsdfSpec(">=1.2.0") - - assert TestType0.incompatible_version("1.1.0") is True - assert TestType0.incompatible_version("1.2.0") is False - assert TestType0.incompatible_version("2.0.1") is False - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType1(types.CustomType): - supported_versions = versioning.AsdfVersion("1.0.0") - - assert TestType1.incompatible_version("1.0.0") is False - assert TestType1.incompatible_version("1.1.0") is True - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType2(types.CustomType): - supported_versions = "1.0.0" - - assert TestType2.incompatible_version("1.0.0") is False - assert TestType2.incompatible_version("1.1.0") is True - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType3(types.CustomType): - # This doesn't make much sense, but it's just for the sake of example - supported_versions = ["1.0.0", versioning.AsdfSpec(">=2.0.0")] - - assert TestType3.incompatible_version("1.0.0") is False - assert TestType3.incompatible_version("1.1.0") is True - assert TestType3.incompatible_version("2.0.0") is False - assert TestType3.incompatible_version("2.0.1") is False - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType4(types.CustomType): - supported_versions = ["1.0.0", versioning.AsdfVersion("1.1.0")] - - assert TestType4.incompatible_version("1.0.0") is False - assert TestType4.incompatible_version("1.0.1") is True - assert TestType4.incompatible_version("1.1.0") is False - assert TestType4.incompatible_version("1.1.1") is True - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class TestType5(types.CustomType): - supported_versions = [versioning.AsdfSpec("<1.0.0"), versioning.AsdfSpec(">=2.0.0")] - - assert TestType5.incompatible_version("0.9.9") is 
False - assert TestType5.incompatible_version("2.0.0") is False - assert TestType5.incompatible_version("2.0.1") is False - assert TestType5.incompatible_version("1.0.0") is True - assert TestType5.incompatible_version("1.1.0") is True - - with pytest.raises(ValueError, match=r"Invalid version string: .*"), pytest.warns( - AsdfDeprecationWarning, - match=r".*subclasses the deprecated CustomType.*", - ): - - class TestType6(types.CustomType): - supported_versions = "blue" - - with pytest.raises(ValueError, match=r"Invalid version string: .*"), pytest.warns( - AsdfDeprecationWarning, - match=r".*subclasses the deprecated CustomType.*", - ): - - class TestType7(types.CustomType): - supported_versions = ["1.1.0", "2.2.0", "blue"] - - -def test_supported_versions(): - class CustomFlow: - def __init__(self, c=None, d=None): - self.c = c - self.d = d - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class CustomFlowType(types.CustomType): - version = "1.1.0" - supported_versions = [(1, 0, 0), versioning.AsdfSpec(">=1.1.0")] - name = "custom_flow" - organization = "nowhere.org" - standard = "custom" - types = [CustomFlow] - - @classmethod - def from_tree(cls, tree, ctx): - # Convert old schema to new CustomFlow type - if cls.version == "1.0.0": - return CustomFlow(c=tree["a"], d=tree["b"]) - - return CustomFlow(**tree) - - @classmethod - def to_tree(cls, data, ctx): - if cls.version == "1.0.0": - return {"a": data.c, "b": data.d} - - return {"c": data.c, "d": data.d} - - class CustomFlowExtension(CustomExtension): - @property - def types(self): - return [CustomFlowType] - - new_yaml = """ -flow_thing: - ! - c: 100 - d: 3.14 -""" - old_yaml = """ -flow_thing: - ! - a: 100 - b: 3.14 -""" - new_buff = helpers.yaml_to_asdf(new_yaml) - new_data = asdf.open(new_buff, extensions=CustomFlowExtension()) - assert type(new_data.tree["flow_thing"]) == CustomFlow - - old_buff = helpers.yaml_to_asdf(old_yaml) - old_data = asdf.open(old_buff, extensions=CustomFlowExtension()) - assert type(old_data.tree["flow_thing"]) == CustomFlow - - -def test_unsupported_version_warning(): - class CustomFlow: - pass - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class CustomFlowType(types.CustomType): - version = "1.0.0" - supported_versions = [(1, 0, 0)] - name = "custom_flow" - organization = "nowhere.org" - standard = "custom" - types = [CustomFlow] - - class CustomFlowExtension(CustomExtension): - @property - def types(self): - return [CustomFlowType] - - yaml = """ -flow_thing: - ! 
- c: 100 - d: 3.14 -""" - buff = helpers.yaml_to_asdf(yaml) - - with pytest.warns( - AsdfConversionWarning, - match=r"Version 1.1.0 of tag:nowhere.org:custom/custom_flow is not compatible", - ): - asdf.open(buff, extensions=CustomFlowExtension()) - - -def test_tag_without_schema(tmp_path): - tmpfile = str(tmp_path / "foo.asdf") - - with pytest.warns(AsdfDeprecationWarning, match=".*subclasses the deprecated CustomType.*"): - - class FooType(types.CustomType): - name = "foo" - - def __init__(self, a, b): - self.a = a - self.b = b - - @classmethod - def from_tree(cls, tree, ctx): - return cls(tree["a"], tree["b"]) - - @classmethod - def to_tree(cls, node, ctx): - return {"a": node.a, "b": node.b} - - def __eq__(self, other): - return self.a == other.a and self.b == other.b - - class FooExtension: - @property - def types(self): - return [FooType] - - @property - def tag_mapping(self): - return [] - - @property - def url_mapping(self): - return [] - - foo = FooType("hello", 42) - tree = {"foo": foo} - - with pytest.warns(AsdfWarning, match=r"Unable to locate schema file"), asdf.AsdfFile( - tree, - extensions=FooExtension(), - ) as af: - af.write_to(tmpfile) - - with pytest.warns(AsdfWarning, match=r"Unable to locate schema file"), asdf.AsdfFile( - tree, - extensions=FooExtension(), - ) as ff: - assert isinstance(ff.tree["foo"], FooType) - assert ff.tree["foo"] == tree["foo"] - - -def test_custom_reference_cycle(tmp_path): - f1 = FractionWithInverse(3, 5) - f2 = FractionWithInverse(5, 3) - f1.inverse = f2 - f2.inverse = f1 - tree = {"fraction": f1} - - path = str(tmp_path / "with_inverse.asdf") - - with asdf.AsdfFile(tree, extensions=FractionWithInverseExtension()) as af: - af.write_to(path) - - with asdf.open(path, extensions=FractionWithInverseExtension()) as af: - assert af["fraction"].inverse.inverse is af["fraction"] - - -def test_super_use_in_versioned_subclass(): - """ - Test fix for issue: https://github.com/asdf-format/asdf/issues/1245 - - Legacy extensions cannot use super in subclasses of CustomType - that define supported_versions due to the metaclasses inability - to create distinct __classcell__ closures. 
- """ - - class Foo: - def __init__(self, bar): - self.bar = bar - - with pytest.raises(RuntimeError, match=r".* ExtensionTypeMeta .* __classcell__ .*"), pytest.warns( - AsdfDeprecationWarning, - match=".*subclasses the deprecated CustomType.*", - ): - - class FooType(types.CustomType): - name = "foo" - version = (1, 0, 0) - supported_versions = [(1, 1, 0), (1, 2, 0)] - types = [Foo] - - @classmethod - def to_tree(cls, node, ctx): - return {"bar": node.bar} - - @classmethod - def from_tree(cls, tree, ctx): - return Foo(tree["bar"]) - - def __getattribute__(self, name): - return super().__getattribute__(name) diff --git a/asdf/_tests/test_util.py b/asdf/_tests/test_util.py index 112221390..e305bb25c 100644 --- a/asdf/_tests/test_util.py +++ b/asdf/_tests/test_util.py @@ -3,7 +3,6 @@ import pytest from asdf import generic_io, util -from asdf.extension._legacy import BuiltinExtension def test_is_primitive(): @@ -35,10 +34,6 @@ def test_get_class_name(): ) -def test_get_class_name_override(): - assert util.get_class_name(BuiltinExtension, instance=False) == "asdf.extension.BuiltinExtension" - - def test_patched_urllib_parse(): assert "asdf" in util.patched_urllib_parse.uses_relative assert "asdf" in util.patched_urllib_parse.uses_netloc diff --git a/asdf/_type_index.py b/asdf/_type_index.py deleted file mode 100644 index bf39f696a..000000000 --- a/asdf/_type_index.py +++ /dev/null @@ -1,337 +0,0 @@ -import bisect -from collections import OrderedDict -from functools import lru_cache - -from . import util -from .versioning import AsdfVersion, default_version, get_version_map, join_tag_version, split_tag_version - -__all__ = ["AsdfTypeIndex"] - - -_BASIC_PYTHON_TYPES = [str, int, float, list, dict, tuple] - - -class _AsdfWriteTypeIndex: - """ - The _AsdfWriteTypeIndex is a helper class for AsdfTypeIndex that - manages an index of types for writing out ASDF files, i.e. from - converting from custom types to tagged_types. It is not always - the inverse of the mapping from tags to custom types, since there - are likely multiple versions present for a given tag. - - This uses the `version_map.yaml` file that ships with the ASDF - standard to figure out which schemas correspond to a particular - version of the ASDF standard. - - An AsdfTypeIndex manages multiple _AsdfWriteTypeIndex instances - for each version the user may want to write out, and they are - instantiated on-demand. - - If version is ``'latest'``, it will just use the highest-numbered - versions of each of the schemas. This is currently only used to - aid in testing. - - In the future, this may be renamed to _ExtensionWriteTypeIndex since it is - not specific to classes that inherit `AsdfType`. - """ - - _version_map = None - - def __init__(self, version, index): - self._version = version - - self._type_by_cls = {} - self._type_by_name = {} - self._type_by_subclasses = {} - self._class_by_subclass = {} - self._types_with_dynamic_subclasses = {} - self._extension_by_cls = {} - self._extensions_used = set() - - try: - version_map = get_version_map(self._version) - core_version_map = version_map["core"] - standard_version_map = version_map["standard"] - except ValueError as err: - msg = f"Don't know how to write out ASDF version {self._version}" - raise ValueError(msg) from err - - # Process all types defined in the ASDF version map. It is important to - # make sure that tags that are associated with the core part of the - # standard are processed first in order to handle subclasses properly. 
- for name, _version in core_version_map.items(): - self._add_by_tag(index, name, AsdfVersion(_version)) - for name, _version in standard_version_map.items(): - self._add_by_tag(index, name, AsdfVersion(_version)) - - # Now add any extension types that aren't known to the ASDF standard. - # This expects that all types defined by ASDF will be encountered - # before any types that are defined by external packages. This - # allows external packages to override types that are also defined - # by ASDF. The ordering is guaranteed due to the use of OrderedDict - # for _versions_by_type_name, and due to the fact that the built-in - # extension will always be processed first. - for name, versions in index._versions_by_type_name.items(): - if name not in self._type_by_name: - self._add_by_tag(index, name, versions[-1]) - - for asdftype in index._unnamed_types: - self._add_all_types(index, asdftype) - - def _should_overwrite(self, cls, new_type): - existing_type = self._type_by_cls[cls] - - # Types that are provided by extensions from other packages should - # only override the type index corresponding to the latest version - # of ASDF. - if existing_type.tag_base() != new_type.tag_base(): - return self._version == default_version - - return True - - def _add_type_to_index(self, index, cls, typ): - if cls in self._type_by_cls and not self._should_overwrite(cls, typ): - return - - self._type_by_cls[cls] = typ - self._extension_by_cls[cls] = index._extension_by_type[typ] - - def _add_subclasses(self, index, typ, asdftype): - for subclass in util.iter_subclasses(typ): - if ( - # Do not overwrite the tag type for an existing subclass if the - # new tag serializes a class that is higher in the type - # hierarchy than the existing subclass. - subclass in self._class_by_subclass - and issubclass(self._class_by_subclass[subclass], typ) - # Allow for cases where a subclass tag is being - # overridden by a tag from another extension. - and self._extension_by_cls[subclass] == index._extension_by_type[asdftype] - ): - continue - - self._class_by_subclass[subclass] = typ - self._type_by_subclasses[subclass] = asdftype - self._extension_by_cls[subclass] = index._extension_by_type[asdftype] - - def _add_all_types(self, index, asdftype): - self._add_type_to_index(index, asdftype, asdftype) - for typ in asdftype.types: - self._add_type_to_index(index, typ, asdftype) - self._add_subclasses(index, typ, asdftype) - - if asdftype.handle_dynamic_subclasses: - for typ in asdftype.types: - self._types_with_dynamic_subclasses[typ] = asdftype - - def _add_by_tag(self, index, name, version): - tag = join_tag_version(name, version) - if tag in index._type_by_tag: - asdftype = index._type_by_tag[tag] - self._type_by_name[name] = asdftype - self._add_all_types(index, asdftype) - - def _mark_used_extension(self, custom_type, serialization_context): - extension = self._extension_by_cls[custom_type] - self._extensions_used.add(extension) - if serialization_context is not None: - serialization_context._mark_extension_used(extension) - - def _process_dynamic_subclass(self, custom_type, serialization_context): - for key, val in self._types_with_dynamic_subclasses.items(): - if issubclass(custom_type, key): - self._type_by_cls[custom_type] = val - self._mark_used_extension(key, serialization_context) - return val - - return None - - def from_custom_type(self, custom_type, _serialization_context=None): - """ - Given a custom type, return the corresponding `ExtensionType` - definition. 
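With the legacy type index gone, the type-to-serializer lookup that ``from_custom_type`` provided is handled by ``asdf.extension.ExtensionManager``. A minimal sketch of that lookup, using the core converters that ship with asdf (``complex`` is assumed here only as a convenient built-in type that a core converter handles)::

    import asdf

    manager = asdf.AsdfFile().extension_manager

    # complex values are handled by a core converter, so the manager can map
    # the Python type to the converter responsible for serializing it
    if manager.handles_type(complex):
        converter = manager.get_converter_for_type(complex)
        print(converter.tags)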
- """ - asdftype = None - - # Try to find an exact class match first... - try: - asdftype = self._type_by_cls[custom_type] - except KeyError: - # ...failing that, match any subclasses - try: - asdftype = self._type_by_subclasses[custom_type] - except KeyError: - # ...failing that, try any subclasses that we couldn't - # cache in _type_by_subclasses. This generally only - # includes classes that are created dynamically post - # Python-import, e.g. astropy.modeling._CompoundModel - # subclasses. - return self._process_dynamic_subclass(custom_type, _serialization_context) - - if asdftype is not None: - extension = self._extension_by_cls.get(custom_type) - if extension is not None: - self._mark_used_extension(custom_type, _serialization_context) - else: - # Handle the case where the dynamic subclass was identified as - # a proper subclass above, but it has not yet been registered - # as such. - self._process_dynamic_subclass(custom_type, _serialization_context) - - return asdftype - - -class AsdfTypeIndex: - """ - An index of the known `ExtensionType` classes. - - In the future this class may be renamed to ExtensionTypeIndex, since it is - not specific to classes that inherit `AsdfType`. - """ - - def __init__(self): - self._write_type_indices = {} - self._type_by_tag = {} - # Use OrderedDict here to preserve the order in which types are added - # to the type index. Since the ASDF built-in extension is always - # processed first, this ensures that types defined by external packages - # will always override corresponding types that are defined by ASDF - # itself. However, if two different external packages define tags for - # the same type, the result is currently undefined. - self._versions_by_type_name = OrderedDict() - self._best_matches = {} - self._unnamed_types = set() - self._hooks_by_type = {} - self._all_types = set() - self._has_warned = {} - self._extension_by_type = {} - - def add_type(self, asdftype, extension): - """ - Add a type to the index. - """ - self._all_types.add(asdftype) - self._extension_by_type[asdftype] = extension - - if asdftype.yaml_tag is None and asdftype.name is None: - return - - if isinstance(asdftype.name, list): - yaml_tags = [asdftype.make_yaml_tag(name) for name in asdftype.name] - elif isinstance(asdftype.name, str): - yaml_tags = [asdftype.yaml_tag] - elif asdftype.name is None: - yaml_tags = [] - else: - msg = "name must be a string, list or None" - raise TypeError(msg) - - for yaml_tag in yaml_tags: - self._type_by_tag[yaml_tag] = asdftype - name, version = split_tag_version(yaml_tag) - versions = self._versions_by_type_name.get(name) - if versions is None: - self._versions_by_type_name[name] = [version] - else: - idx = bisect.bisect_left(versions, version) - if idx == len(versions) or versions[idx] != version: - versions.insert(idx, version) - - if not len(yaml_tags): - self._unnamed_types.add(asdftype) - - def from_custom_type(self, custom_type, version=default_version, _serialization_context=None): - """ - Given a custom type, return the corresponding `ExtensionType` - definition. - """ - # Basic Python types should not ever have an AsdfType associated with - # them. 
- if custom_type in _BASIC_PYTHON_TYPES: - return None - - write_type_index = self._write_type_indices.get(str(version)) - if write_type_index is None: - write_type_index = _AsdfWriteTypeIndex(version, self) - self._write_type_indices[version] = write_type_index - - return write_type_index.from_custom_type(custom_type, _serialization_context=_serialization_context) - - def fix_yaml_tag(self, ctx, tag): - """ - Given a YAML tag, adjust it to the best supported version. - - If there is no exact match, this finds the newest version - understood that is still less than the version in file. Or, - the earliest understood version if none are less than the - version in the file. - """ - if tag in self._type_by_tag: - return tag - - if tag in self._best_matches: - best_tag = self._best_matches[tag] - ctx._warn_tag_mismatch(tag, best_tag) - return best_tag - - name, version = split_tag_version(tag) - - versions = self._versions_by_type_name.get(name) - if versions is None: - return tag - - # The versions list is kept sorted, so bisect can be used to - # quickly find the best option. - i = bisect.bisect_left(versions, version) - i = max(0, i - 1) - best_version = versions[i] - best_tag = join_tag_version(name, best_version) - ctx._warn_tag_mismatch(tag, best_tag) - self._best_matches[tag] = best_tag - return best_tag - - def from_yaml_tag(self, ctx, tag, _serialization_context=None): - """ - From a given YAML tag string, return the corresponding - AsdfType definition. - """ - tag = self.fix_yaml_tag(ctx, tag) - asdftype = self._type_by_tag.get(tag) - if asdftype is not None and _serialization_context is not None: - _serialization_context._mark_extension_used(self._extension_by_type[asdftype]) - return asdftype - - @lru_cache(5) - def has_hook(self, hook_name): - """ - Returns `True` if the given hook name exists on any of the managed - types. - """ - return any(hasattr(cls, hook_name) for cls in self._all_types) - - def get_hook_for_type(self, hookname, typ, version=default_version): - """ - Get the hook function for the given type, if it exists, - else return None. - """ - hooks = self._hooks_by_type.setdefault(hookname, {}) - hook = hooks.get(typ, None) - if hook is not None: - return hook - - tag = self.from_custom_type(typ, version) - if tag is not None: - hook = getattr(tag, hookname, None) - if hook is not None: - hooks[typ] = hook - return hook - - hooks[typ] = None - return None - - def get_extensions_used(self, version=default_version): - write_type_index = self._write_type_indices.get(str(version)) - if write_type_index is None: - return [] - - return list(write_type_index._extensions_used) diff --git a/asdf/_types.py b/asdf/_types.py deleted file mode 100644 index 45e2c567a..000000000 --- a/asdf/_types.py +++ /dev/null @@ -1,493 +0,0 @@ -import importlib -import re -import warnings -from copy import copy - -import asdf.testing.helpers - -from . import tagged, util -from .exceptions import AsdfDeprecationWarning -from .versioning import AsdfSpec, AsdfVersion - -__all__ = ["format_tag", "CustomType", "ExtensionType"] # noqa: F822 - - -# regex used to parse module name from optional version string -MODULE_RE = re.compile(r"([a-zA-Z]+)(-(\d+\.\d+\.\d+))?") - - -def __getattr__(name): - if name == "format_tag": - warnings.warn( - "asdf.types.format_tag is deprecated. 
Please use asdf.testing.helpers.format_tag", - AsdfDeprecationWarning, - ) - return asdf.testing.helpers.format_tag - msg = f"module {__name__!r} has no attribute {name!r}" - raise AttributeError(msg) - - -_all_asdftypes = set() - - -def _from_tree_tagged_missing_requirements(cls, tree, ctx): - # A special version of AsdfType.from_tree_tagged for when the - # required dependencies for an AsdfType are missing. - plural, verb = ("s", "are") if len(cls.requires) else ("", "is") - - # This error will be handled by yamlutil.tagged_tree_to_custom_tree, which - # will cause a warning to be issued indicating that the tree failed to be - # converted. - msg = f"{util.human_list(cls.requires)} package{plural} {verb} required to instantiate '{tree._tag}'" - raise TypeError(msg) - - -class ExtensionTypeMeta(type): - """ - Custom class constructor for tag types. - """ - - _import_cache = {} - - @classmethod - def _has_required_modules(cls, requires): - for string in requires: - has_module = True - match = MODULE_RE.match(string) - modname, _, version = match.groups() - if modname in cls._import_cache and not cls._import_cache[modname]: - return False - - try: - module = importlib.import_module(modname) - if version and hasattr(module, "__version__") and module.__version__ < version: - has_module = False - - except ImportError: - has_module = False - - finally: - cls._import_cache[modname] = has_module - if not has_module: - return False - - return True - - @classmethod - def _find_in_bases(cls, attrs, bases, name, default=None): - if name in attrs: - return attrs[name] - for base in bases: - if hasattr(base, name): - return getattr(base, name) - return default - - @property - def versioned_siblings(cls): - return getattr(cls, "__versioned_siblings") or [] - - def __new__(cls, name, bases, attrs): - requires = cls._find_in_bases(attrs, bases, "requires", []) - if not cls._has_required_modules(requires): - attrs["from_tree_tagged"] = classmethod(_from_tree_tagged_missing_requirements) - attrs["types"] = [] - attrs["has_required_modules"] = False - else: - attrs["has_required_modules"] = True - types = cls._find_in_bases(attrs, bases, "types", []) - new_types = [] - for type_ in types: - new_types.append(util.resolve_name(type_) if isinstance(type_, str) else type_) - - attrs["types"] = new_types - - new_cls = super().__new__(cls, name, bases, attrs) - - if hasattr(new_cls, "version") and not isinstance(new_cls.version, (AsdfVersion, AsdfSpec)): - new_cls.version = AsdfVersion(new_cls.version) - - if hasattr(new_cls, "name"): - if isinstance(new_cls.name, str): - if "yaml_tag" not in attrs: - new_cls.yaml_tag = new_cls.make_yaml_tag(new_cls.name) - elif isinstance(new_cls.name, list): - pass - elif new_cls.name is not None: - msg = "name must be string or list" - raise TypeError(msg) - - if hasattr(new_cls, "supported_versions"): - if not isinstance(new_cls.supported_versions, (list, set)): - new_cls.supported_versions = [new_cls.supported_versions] - supported_versions = set() - for version in new_cls.supported_versions: - # This should cause an exception for invalid input - supported_versions.add( - version if isinstance(version, (AsdfVersion, AsdfSpec)) else AsdfVersion(version), - ) - # We need to convert back to a list here so that the 'in' operator - # uses actual comparison instead of hash equality - new_cls.supported_versions = list(supported_versions) - siblings = [] - for version in new_cls.supported_versions: - if version != new_cls.version: - new_attrs = copy(attrs) - new_attrs["version"] = 
version - new_attrs["supported_versions"] = set() - new_attrs["_latest_version"] = new_cls.version - if "__classcell__" in new_attrs: - msg = ( - "Subclasses of ExtensionTypeMeta that define " - "supported_versions cannot used super() to call " - "parent class functions. super() creates a " - "__classcell__ closure that cannot be duplicated " - "during creation of versioned siblings. " - "See https://github.com/asdf-format/asdf/issues/1245" - ) - raise RuntimeError(msg) - siblings.append(ExtensionTypeMeta.__new__(cls, name, bases, new_attrs)) - setattr(new_cls, "__versioned_siblings", siblings) - - return new_cls - - -class AsdfTypeMeta(ExtensionTypeMeta): - """ - Keeps track of `AsdfType` subclasses that are created, and stores them in - `AsdfTypeIndex`. - """ - - def __new__(cls, name, bases, attrs): - new_cls = super().__new__(cls, name, bases, attrs) - # Classes using this metaclass get added to the list of built-in - # extensions - if name != "_AsdfType": - _all_asdftypes.add(new_cls) - - return new_cls - - -class ExtensionType: - """ - The base class of all custom types in the tree. - - Besides the attributes defined below, most subclasses will also - override ``to_tree`` and ``from_tree``. - """ - - name = None - organization = "stsci.edu" - standard = "asdf" - version = (1, 0, 0) - supported_versions = set() - types = [] - handle_dynamic_subclasses = False - validators = {} - requires = [] - yaml_tag = None - - @classmethod - def names(cls): - """ - Returns the name(s) represented by this tag type as a list. - - While some tag types represent only a single custom type, others - represent multiple types. In the latter case, the `name` attribute of - the extension is actually a list, not simply a string. This method - normalizes the value of `name` by returning a list in all cases. - - Returns - ------- - `list` of names represented by this tag type - """ - if cls.name is None: - return None - - return cls.name if isinstance(cls.name, list) else [cls.name] - - @classmethod - def make_yaml_tag(cls, name, versioned=True): - """ - Given the name of a type, returns a string representing its YAML tag. - - Parameters - ---------- - name : str - The name of the type. In most cases this will correspond to the - `name` attribute of the tag type. However, it is passed as a - parameter since some tag types represent multiple custom - types. - - versioned : bool - If `True`, the tag will be versioned. Otherwise, a YAML tag without - a version will be returned. - - Returns - ------- - `str` representing the YAML tag - """ - return asdf.testing.helpers.format_tag(cls.organization, cls.standard, cls.version if versioned else None, name) - - @classmethod - def tag_base(cls): - """ - Returns the base of the YAML tag for types represented by this class. - - This method returns the portion of the tag that represents the standard - and the organization of any type represented by this class. - - Returns - ------- - `str` representing the base of the YAML tag - """ - return cls.make_yaml_tag("", versioned=False) - - @classmethod - def to_tree(cls, node, ctx): - """ - Converts instances of custom types into YAML representations. - - This method should be overridden by custom extension classes in order - to define how custom types are serialized into YAML. The method must - return a single Python object corresponding to one of the basic YAML - types (dict, list, str, or number). However, the types can be nested - and combined in order to represent more complex custom types. 
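Under the converter API the corresponding hook is ``Converter.to_yaml_tree`` (with ``from_yaml_tree`` as its inverse), and the tag is passed in explicitly rather than derived from class attributes. A minimal sketch, assuming a hypothetical ``Point`` class and example.org URIs::

    import asdf

    class Point:
        def __init__(self, x, y):
            self.x = x
            self.y = y

    class PointConverter:
        tags = ["asdf://example.org/tags/point-1.0.0"]
        types = [Point]

        def to_yaml_tree(self, obj, tag, ctx):
            # like to_tree: return basic YAML-compatible types
            return {"x": obj.x, "y": obj.y}

        def from_yaml_tree(self, node, tag, ctx):
            # like from_tree: rebuild the custom object
            return Point(node["x"], node["y"])

    class PointExtension:
        extension_uri = "asdf://example.org/extensions/point-1.0.0"
        tags = ["asdf://example.org/tags/point-1.0.0"]
        converters = [PointConverter()]

    asdf.get_config().add_extension(PointExtension())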
- - This method is called as part of the process of writing an `asdf.AsdfFile` - object. Whenever a custom type (or a subclass of that type) that is - listed in the `types` attribute of this class is encountered, this - method will be used to serialize that type. - - The name `to_tree` refers to the act of converting a custom type into - part of a YAML object tree. - - Parameters - ---------- - node : `object` - Instance of a custom type to be serialized. Will be an instance (or - an instance of a subclass) of one of the types listed in the - `types` attribute of this class. - - ctx : `asdf.AsdfFile` - An instance of the `asdf.AsdfFile` object that is being written out. - - Returns - ------- - A basic YAML type (`dict`, `list`, `str`, `int`, `float`, or - `complex`) representing the properties of the custom type to be - serialized. These types can be nested in order to represent more - complex custom types. - """ - return node.__class__.__bases__[0](node) - - @classmethod - def to_tree_tagged(cls, node, ctx): - """ - Converts instances of custom types into tagged objects. - - It is more common for custom tag types to override `to_tree` instead of - this method. This method should be overridden if it is necessary - to modify the YAML tag that will be used to tag this object. - - Parameters - ---------- - node : `object` - Instance of a custom type to be serialized. Will be an instance (or - an instance of a subclass) of one of the types listed in the - `types` attribute of this class. - - ctx : `asdf.AsdfFile` - An instance of the `asdf.AsdfFile` object that is being written out. - - Returns - ------- - An instance of `asdf.tagged.Tagged`. - """ - obj = cls.to_tree(node, ctx) - return tagged.tag_object(cls.yaml_tag, obj, ctx=ctx) - - @classmethod - def from_tree(cls, tree, ctx): - """ - Converts basic types representing YAML trees into custom types. - - This method should be overridden by custom extension classes in order - to define how custom types are deserialized from the YAML - representation back into their original types. Typically the method will - return an instance of the original custom type. It is also permitted - to return a generator, which yields a partially constructed result, then - completes construction once the generator is drained. This is useful - when constructing objects that contain reference cycles. - - This method is called as part of the process of reading an ASDF file in - order to construct an `asdf.AsdfFile` object. Whenever a YAML subtree is - encountered that has a tag that corresponds to the `yaml_tag` property - of this class, this method will be used to deserialize that tree back - into an instance of the original custom type. - - Parameters - ---------- - tree : `object` representing YAML tree - An instance of a basic Python type (possibly nested) that - corresponds to a YAML subtree. - - ctx : `asdf.AsdfFile` - An instance of the `asdf.AsdfFile` object that is being constructed. - - Returns - ------- - An instance of the custom type represented by this extension class, - or a generator that yields that instance. - """ - return cls(tree) - - @classmethod - def from_tree_tagged(cls, tree, ctx): - """ - Converts from tagged tree into custom type. - - It is more common for extension classes to override `from_tree` instead - of this method. This method should only be overridden if it is - necessary to access the `_tag` property of the `~asdf.tagged.Tagged` object - directly. 
- - Parameters - ---------- - tree : `asdf.tagged.Tagged` object representing YAML tree - - ctx : `asdf.AsdfFile` - An instance of the `asdf.AsdfFile` object that is being constructed. - - Returns - ------- - An instance of the custom type represented by this extension class. - """ - return cls.from_tree(tree.data, ctx) - - @classmethod - def incompatible_version(cls, version): - """ - Indicates if given version is known to be incompatible with this type. - - If this tag class explicitly identifies compatible versions then this - checks whether a given version is compatible or not (see - `supported_versions`). Otherwise, all versions are assumed to be - compatible. - - Child classes can override this method to affect how version - compatibility for this type is determined. - - Parameters - ---------- - version : `str` or `~asdf.versioning.AsdfVersion` - The version to test for compatibility. - """ - if cls.supported_versions and version not in cls.supported_versions: - return True - - return False - - -class _AsdfType(ExtensionType, metaclass=AsdfTypeMeta): - """ - Base class for all built-in ASDF types. Types that inherit this class will - be automatically added to the list of built-ins. This should *not* be used - for user-defined extensions. - """ - - -class CustomType(ExtensionType, metaclass=ExtensionTypeMeta): - """ - Base class for all user-defined types. - """ - - # These attributes are duplicated here with docstrings since a bug in - # sphinx prevents the docstrings of class attributes from being inherited - # properly (see https://github.com/sphinx-doc/sphinx/issues/741). The - # docstrings are not included anywhere else in the class hierarchy since - # this class is the only one exposed in the public API. - name = None - """ - `str` or `list`: The name of the type. - """ - - organization = "stsci.edu" - """ - `str`: The organization responsible for the type. - """ - - standard = "asdf" - """ - `str`: The standard the type is defined in. - """ - - version = (1, 0, 0) - """ - `str`, `tuple`, `asdf.versioning.AsdfVersion`, or `asdf.versioning.AsdfSpec`: - The version of the type. - """ - - supported_versions = set() - """ - `set`: Versions that explicitly compatible with this extension class. - - If provided, indicates explicit compatibility with the given set - of versions. Other versions of the same schema that are not included in - this set will not be converted to custom types with this class. """ - - types = [] - """ - `list`: List of types that this extension class can convert to/from YAML. - - Custom Python types that, when found in the tree, will be converted into - basic types for YAML output. Can be either strings referring to the types - or the types themselves.""" - - handle_dynamic_subclasses = False - """ - `bool`: Indicates whether dynamically generated subclasses can be serialized - - Flag indicating whether this type is capable of serializing subclasses - of any of the types listed in ``types`` that are generated dynamically. - """ - - validators = {} - """ - `dict`: Mapping JSON Schema keywords to validation functions for jsonschema. - - Useful if the type defines extra types of validation that can be - performed. - """ - - requires = [] - """ - `list`: Python packages that are required to instantiate the object. - """ - - yaml_tag = None - """ - `str`: The YAML tag to use for the type. - - If not provided, it will be automatically generated from name, - organization, standard and version. 
- """ - - has_required_modules = True - """ - `bool`: Indicates whether modules specified by `requires` are available. - - NOTE: This value is automatically generated. Do not set it in subclasses as - it will be overwritten. - """ - - def __init_subclass__(cls, **kwargs): - super().__init_subclass__(**kwargs) - - # Create a warning for a direct child of a CustomType class (not in grandchild) - if CustomType in cls.__bases__: - warnings.warn( - f"{cls.__name__} from {cls.__module__} subclasses the deprecated CustomType class. " - "Please see the new extension API " - "https://asdf.readthedocs.io/en/stable/asdf/extending/converters.html", - AsdfDeprecationWarning, - ) diff --git a/asdf/asdf.py b/asdf/asdf.py index 8414953e9..924274d8f 100644 --- a/asdf/asdf.py +++ b/asdf/asdf.py @@ -23,7 +23,7 @@ DelimiterNotFoundError, ValidationError, ) -from .extension import Extension, ExtensionProxy, _legacy, _serialization_context, get_cached_extension_manager +from .extension import Extension, ExtensionProxy, _serialization_context, get_cached_extension_manager from .search import AsdfSearchResult from .tags.core import AsdfObject, ExtensionMetadata, HistoryEntry, Software from .util import NotSet @@ -132,6 +132,8 @@ def __init__( files follow custom conventions beyond those enforced by the standard. """ + self._fname = "" + # Don't use the version setter here; it tries to access # the extensions, which haven't been assigned yet. if version is None: @@ -142,10 +144,9 @@ def __init__( self._user_extensions = self._process_user_extensions(extensions) self._plugin_extensions = self._process_plugin_extensions() self._extension_manager = None - self._extension_list_ = None if custom_schema is not None: - self._custom_schema = schema._load_schema_cached(custom_schema, self._resolver, True) + self._custom_schema = schema._load_schema_cached(custom_schema, None, True) else: self._custom_schema = None @@ -182,7 +183,6 @@ def __init__( # Set directly to self._tree (bypassing property), since # we can assume the other AsdfFile is already valid. 
self._tree = tree.tree - self._run_modifying_hook("copy_to_new_asdf", validate=False) self.find_references() else: self.tree = tree @@ -216,7 +216,6 @@ def version(self, value): self._user_extensions = self._process_user_extensions(self._user_extensions) self._plugin_extensions = self._process_plugin_extensions() self._extension_manager = None - self._extension_list_ = None @property def version_string(self): @@ -257,7 +256,6 @@ def extensions(self, value): """ self._user_extensions = self._process_user_extensions(value) self._extension_manager = None - self._extension_list_ = None @property def extension_manager(self): @@ -272,14 +270,6 @@ def extension_manager(self): self._extension_manager = get_cached_extension_manager(self._user_extensions + self._plugin_extensions) return self._extension_manager - @property - def _extension_list(self): - if self._extension_list_ is None: - self._extension_list_ = _legacy.get_cached_asdf_extension_list( - self._user_extensions + self._plugin_extensions, - ) - return self._extension_list_ - def __enter__(self): return self @@ -378,10 +368,8 @@ def _process_user_extensions(self, extensions): """ if extensions is None: extensions = [] - elif isinstance(extensions, (_legacy._AsdfExtension, Extension, ExtensionProxy)): + elif isinstance(extensions, (Extension, ExtensionProxy)): extensions = [extensions] - elif isinstance(extensions, _legacy.AsdfExtensionList): - extensions = extensions.extensions if not isinstance(extensions, list): msg = "The extensions parameter must be an extension or list of extensions" @@ -496,26 +484,6 @@ def uri(self): """ return self._blocks._uri - @property - def _tag_to_schema_resolver(self): - return self._extension_list.tag_mapping - - @property - def _tag_mapping(self): - return self._extension_list.tag_mapping - - @property - def _url_mapping(self): - return self._extension_list.url_mapping - - @property - def _resolver(self): - return self._extension_list.resolver - - @property - def _type_index(self): - return self._extension_list.type_index - def resolve_uri(self, uri): """ Resolve a (possibly relative) URI against the URI of this ASDF @@ -889,7 +857,6 @@ def _open_asdf( self._check_extensions(tree, strict=strict_extension_check) self._tree = tree - self._run_hook("post_read") return self @@ -978,8 +945,10 @@ def _tree_finalizer(tagged_tree): fd.fast_forward(padding) def _pre_write(self, fd): - if len(self._tree): - self._run_hook("pre_write") + pass + + def _post_write(self, fd): + pass def _serial_write(self, fd, pad_blocks, include_block_index): with self._blocks.write_context(fd): @@ -996,10 +965,6 @@ def _serial_write(self, fd, pad_blocks, include_block_index): finally: self._post_write(fd) - def _post_write(self, fd): - if len(self._tree): - self._run_hook("post_write") - def update( self, all_array_storage=NotSet, @@ -1252,36 +1217,6 @@ def resolve_references(self, **kwargs): # tree will be validated. 
self.tree = reference.resolve_references(self._tree, self) - def _run_hook(self, hookname): - type_index = self._type_index - - if not type_index.has_hook(hookname): - return - - for node in treeutil.iter_tree(self._tree): - hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) - if hook is not None: - hook(node, self) - - def _run_modifying_hook(self, hookname, validate=True): - type_index = self._type_index - - if not type_index.has_hook(hookname): - return None - - def walker(node): - hook = type_index.get_hook_for_type(hookname, type(node), self.version_string) - if hook is not None: - return hook(node, self) - return node - - tree = treeutil.walk_and_modify(self.tree, walker, ignore_implicit_conversion=self._ignore_implicit_conversion) - - if validate: - self._validate(tree) - self._tree = tree - return self._tree - def resolve_and_inline(self): """ Resolves all external references and inlines all data. This diff --git a/asdf/commands/tags.py b/asdf/commands/tags.py index 60f3e405c..3529d6054 100644 --- a/asdf/commands/tags.py +++ b/asdf/commands/tags.py @@ -53,8 +53,6 @@ def list_tags(display_classes=False, iostream=sys.stdout): tag_pairs = [] for tag in af.extension_manager._converters_by_tag: tag_pairs.append((tag, af.extension_manager.get_converter_for_tag(tag).types)) - for tag in af._type_index._type_by_tag: - tag_pairs.append((tag, [af._type_index._type_by_tag[tag]])) for tag, types in sorted(tag_pairs, key=lambda pair: pair[0]): string = str(tag) diff --git a/asdf/core/_extensions.py b/asdf/core/_extensions.py index 059ea4e5a..205fd23f0 100644 --- a/asdf/core/_extensions.py +++ b/asdf/core/_extensions.py @@ -48,4 +48,9 @@ ] -EXTENSIONS = [ManifestExtension.from_uri(u, converters=CONVERTERS, validators=VALIDATORS) for u in MANIFEST_URIS] +EXTENSIONS = [ + ManifestExtension.from_uri( + u, converters=CONVERTERS, validators=VALIDATORS, legacy_class_names=["asdf.extension.BuiltinExtension"] + ) + for u in MANIFEST_URIS +] diff --git a/asdf/entry_points.py b/asdf/entry_points.py index 3052de763..f8d73283a 100644 --- a/asdf/entry_points.py +++ b/asdf/entry_points.py @@ -13,7 +13,6 @@ RESOURCE_MAPPINGS_GROUP = "asdf.resource_mappings" EXTENSIONS_GROUP = "asdf.extensions" -LEGACY_EXTENSIONS_GROUP = "asdf_extensions" def get_resource_mappings(): @@ -22,8 +21,7 @@ def get_resource_mappings(): def get_extensions(): extensions = _list_entry_points(EXTENSIONS_GROUP, ExtensionProxy) - legacy_extensions = _list_entry_points(LEGACY_EXTENSIONS_GROUP, ExtensionProxy) - return extensions + legacy_extensions + return extensions def _list_entry_points(group, proxy_class): @@ -54,10 +52,6 @@ def _handle_error(e): # Catch errors loading entry points and warn instead of raising try: with warnings.catch_warnings(): - if entry_point.group == LEGACY_EXTENSIONS_GROUP and entry_point.name != "builtin": - # for now, the builtin extension is still registered via asdf_extensions - # so we only load this legacy extension and ignore all non-builtin extensions - continue elements = entry_point.load()() except Exception as e: diff --git a/asdf/extension/_extension.py b/asdf/extension/_extension.py index d19ce81ac..3e5006cee 100644 --- a/asdf/extension/_extension.py +++ b/asdf/extension/_extension.py @@ -6,7 +6,6 @@ from ._compressor import Compressor from ._converter import ConverterProxy -from ._legacy import _AsdfExtension from ._tag import TagDefinition from ._validator import Validator @@ -131,7 +130,7 @@ def validators(self): return [] -class ExtensionProxy(Extension, 
_AsdfExtension): +class ExtensionProxy(Extension): """ Proxy that wraps an extension, provides default implementations of optional methods, and carries additional information on the @@ -146,7 +145,7 @@ def maybe_wrap(cls, delegate): return ExtensionProxy(delegate) def __init__(self, delegate, package_name=None, package_version=None): - if not isinstance(delegate, (Extension, _AsdfExtension)): + if not isinstance(delegate, Extension): msg = "Extension must implement the Extension interface" raise TypeError(msg) @@ -156,7 +155,7 @@ def __init__(self, delegate, package_name=None, package_version=None): self._class_name = get_class_name(delegate) - self._legacy = isinstance(delegate, _AsdfExtension) + self._legacy = False # Sort these out up-front so that errors are raised when the extension is loaded # and not in the middle of the user's session. The extension will fail to load @@ -367,12 +366,7 @@ def class_name(self): @property def legacy(self): """ - Get the extension's legacy flag. Subclasses of ``asdf.extension._AsdfExtension`` - are marked `True`. - - Returns - ------- - bool + False """ return self._legacy diff --git a/asdf/extension/_legacy.py b/asdf/extension/_legacy.py deleted file mode 100644 index b97b20e24..000000000 --- a/asdf/extension/_legacy.py +++ /dev/null @@ -1,258 +0,0 @@ -import abc -import warnings -from functools import lru_cache - -from asdf import _resolver as resolver -from asdf import _types as types -from asdf._type_index import AsdfTypeIndex -from asdf.exceptions import AsdfDeprecationWarning - -__all__ = ["_AsdfExtension"] - - -class _AsdfExtension(metaclass=abc.ABCMeta): - """ - Abstract base class defining a (legacy) extension to ASDF. - New code should use `asdf.extension.Extension` instead. - """ - - @classmethod - def __subclasshook__(cls, class_): - if cls is _AsdfExtension: - return hasattr(class_, "types") and hasattr(class_, "tag_mapping") - return NotImplemented - - @property - @abc.abstractmethod - def types(self): - """ - A list of `asdf.CustomType` subclasses that describe how to store - custom objects to and from ASDF. - """ - - @property - @abc.abstractmethod - def tag_mapping(self): - """ - A list of 2-tuples or callables mapping YAML tag prefixes to JSON Schema - URL prefixes. - - For each entry: - - - If a 2-tuple, the first part of the tuple is a YAML tag - prefix to match. The second part is a string, where case - the following are available as Python formatting tokens: - - - ``{tag}``: the complete YAML tag. - - ``{tag_suffix}``: the part of the YAML tag after the - matched prefix. - - ``{tag_prefix}``: the matched YAML tag prefix. - - - If a callable, it is passed the entire YAML tag must return - the entire JSON schema URL if it matches, otherwise, return `None`. - - Note that while JSON Schema URLs uniquely define a JSON - Schema, they do not have to actually exist on an HTTP server - and be fetchable (much like XML namespaces). - - For example, to match all YAML tags with the - ``tag:nowhere.org:custom` prefix to the - ``http://nowhere.org/schemas/custom/`` URL prefix:: - - return [('tag:nowhere.org:custom/', - 'http://nowhere.org/schemas/custom/{tag_suffix}')] - """ - - @property - @abc.abstractmethod - def url_mapping(self): - """ - Schema content can be provided using the resource Mapping API. - - A list of 2-tuples or callables mapping JSON Schema URLs to - other URLs. 
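The resource-mapping API that replaces this maps schema URIs directly to schema content, so no URL rewriting is involved. A small sketch, assuming a hypothetical example.org schema::

    import asdf

    schema_uri = "asdf://example.org/schemas/point-1.0.0"
    schema_content = """
    id: asdf://example.org/schemas/point-1.0.0
    type: object
    properties:
      x: {type: number}
      y: {type: number}
    """

    # any Mapping of URI -> str/bytes works; packages usually register a
    # directory of schema files via asdf.resource.DirectoryResourceMapping
    asdf.get_config().add_resource_mapping({schema_uri: schema_content})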
diff --git a/asdf/extension/_legacy.py b/asdf/extension/_legacy.py
deleted file mode 100644
index b97b20e24..000000000
--- a/asdf/extension/_legacy.py
+++ /dev/null
@@ -1,258 +0,0 @@
-import abc
-import warnings
-from functools import lru_cache
-
-from asdf import _resolver as resolver
-from asdf import _types as types
-from asdf._type_index import AsdfTypeIndex
-from asdf.exceptions import AsdfDeprecationWarning
-
-__all__ = ["_AsdfExtension"]
-
-
-class _AsdfExtension(metaclass=abc.ABCMeta):
-    """
-    Abstract base class defining a (legacy) extension to ASDF.
-    New code should use `asdf.extension.Extension` instead.
-    """
-
-    @classmethod
-    def __subclasshook__(cls, class_):
-        if cls is _AsdfExtension:
-            return hasattr(class_, "types") and hasattr(class_, "tag_mapping")
-        return NotImplemented
-
-    @property
-    @abc.abstractmethod
-    def types(self):
-        """
-        A list of `asdf.CustomType` subclasses that describe how to store
-        custom objects to and from ASDF.
-        """
-
-    @property
-    @abc.abstractmethod
-    def tag_mapping(self):
-        """
-        A list of 2-tuples or callables mapping YAML tag prefixes to JSON Schema
-        URL prefixes.
-
-        For each entry:
-
-        - If a 2-tuple, the first part of the tuple is a YAML tag
-          prefix to match.  The second part is a string, where case
-          the following are available as Python formatting tokens:
-
-          - ``{tag}``: the complete YAML tag.
-          - ``{tag_suffix}``: the part of the YAML tag after the
-            matched prefix.
-          - ``{tag_prefix}``: the matched YAML tag prefix.
-
-        - If a callable, it is passed the entire YAML tag must return
-          the entire JSON schema URL if it matches, otherwise, return `None`.
-
-        Note that while JSON Schema URLs uniquely define a JSON
-        Schema, they do not have to actually exist on an HTTP server
-        and be fetchable (much like XML namespaces).
-
-        For example, to match all YAML tags with the
-        ``tag:nowhere.org:custom`` prefix to the
-        ``http://nowhere.org/schemas/custom/`` URL prefix::
-
-           return [('tag:nowhere.org:custom/',
-                    'http://nowhere.org/schemas/custom/{tag_suffix}')]
-        """
-
-    @property
-    @abc.abstractmethod
-    def url_mapping(self):
-        """
-        Schema content can be provided using the resource Mapping API.
-
-        A list of 2-tuples or callables mapping JSON Schema URLs to
-        other URLs.  This is useful if the JSON Schemas are not
-        actually fetchable at their corresponding URLs but are on the
-        local filesystem, or, to save bandwidth, we have a copy of
-        fetchable schemas on the local filesystem.  If neither is
-        desirable, it may simply be the empty list.
-
-        For each entry:
-
-        - If a 2-tuple, the first part is a URL prefix to match.  The
-          second part is a string, where the following are available
-          as Python formatting tokens:
-
-          - ``{url}``: The entire JSON schema URL
-          - ``{url_prefix}``: The matched URL prefix
-          - ``{url_suffix}``: The part of the URL after the prefix.
-
-        - If a callable, it is passed the entire JSON Schema URL and
-          must return a resolvable URL pointing to the schema content.
-          If it doesn't match, should return `None`.
-
-        For example, to map a remote HTTP URL prefix to files installed
-        alongside as data alongside Python module::
-
-           return [('http://nowhere.org/schemas/custom/1.0.0/',
-                    asdf.util.filepath_to_url(
-                        os.path.join(SCHEMA_PATH, 'stsci.edu')) +
-                    '/{url_suffix}.yaml'
-                   )]
-        """
-
-
-class AsdfExtensionList:
-    """
-    Manage a set of extensions that are in effect.
-    """
-
-    def __init__(self, extensions):
-        from ._extension import ExtensionProxy
-
-        extensions = [ExtensionProxy.maybe_wrap(e) for e in extensions]
-
-        tag_mapping = []
-        url_mapping = []
-        validators = {}
-        self._type_index = AsdfTypeIndex()
-        for extension in extensions:
-            tag_mapping.extend(extension.tag_mapping)
-            url_mapping.extend(extension.url_mapping)
-            for typ in extension.types:
-                self._type_index.add_type(typ, extension)
-                validators.update(typ.validators)
-                for sibling in typ.versioned_siblings:
-                    self._type_index.add_type(sibling, extension)
-                    validators.update(sibling.validators)
-        self._extensions = extensions
-        self._tag_mapping = resolver.Resolver(tag_mapping, "tag")
-        self._url_mapping = resolver.Resolver(url_mapping, "url")
-        self._resolver = resolver.ResolverChain(self._tag_mapping, self._url_mapping)
-        self._validators = validators
-
-    @property
-    def tag_to_schema_resolver(self):
-        """Deprecated. Use `tag_mapping` instead"""
-        warnings.warn(
-            "The 'tag_to_schema_resolver' property is deprecated. Use 'tag_mapping' instead.",
-            AsdfDeprecationWarning,
-        )
-        return self._tag_mapping
-
-    @property
-    def extensions(self):
-        return self._extensions
-
-    @property
-    def tag_mapping(self):
-        return self._tag_mapping
-
-    @property
-    def url_mapping(self):
-        return self._url_mapping
-
-    @property
-    def resolver(self):
-        return self._resolver
-
-    @property
-    def type_index(self):
-        return self._type_index
-
-    @property
-    def validators(self):
-        return self._validators
-
-
-def get_cached_asdf_extension_list(extensions):
-    """
-    Get a previously created AsdfExtensionList for the specified
-    extensions, or create and cache one if necessary.  Building
-    the type index is expensive, so it helps performance to reuse
-    the index when possible.
-
-    Parameters
-    ----------
-    extensions : list of asdf.extension._AsdfExtension
-
-    Returns
-    -------
-    asdf.extension.AsdfExtensionList
-    """
-    from ._extension import ExtensionProxy
-
-    # The tuple makes the extensions hashable so that we
-    # can pass them to the lru_cache method.  The ExtensionProxy
-    # overrides __hash__ to return the hashed object id of the wrapped
-    # extension, so this will method will only return the same
-    # AsdfExtensionList if the list contains identical extension
-    # instances in identical order.
-    extensions = tuple(ExtensionProxy.maybe_wrap(e) for e in extensions)
-
-    return _get_cached_asdf_extension_list(extensions)
-
-
-@lru_cache
-def _get_cached_asdf_extension_list(extensions):
-    return AsdfExtensionList(extensions)
-
-
-# A kludge in asdf.util.get_class_name allows this class to retain
-# its original name, despite being moved from extension.py to
-# this file.
-class BuiltinExtension:
-    """
-    This is the "extension" to ASDF that includes all the built-in
-    tags.  Even though it's not really an extension and it's always
-    available, it's built in the same way as an extension.
-    """
-
-    @property
-    def types(self):
-        return types._all_asdftypes
-
-    @property
-    def tag_mapping(self):
-        return resolver.DEFAULT_TAG_TO_URL_MAPPING
-
-    @property
-    def url_mapping(self):
-        return resolver.DEFAULT_URL_MAPPING
-
-
-class _DefaultExtensions:
-    @property
-    def extensions(self):
-        from asdf.config import get_config
-
-        return [e for e in get_config().extensions if e.legacy]
-
-    @property
-    def extension_list(self):
-        return get_cached_asdf_extension_list(self.extensions)
-
-    @property
-    def package_metadata(self):
-        return {
-            e.class_name: (e.package_name, e.package_version) for e in self.extensions if e.package_name is not None
-        }
-
-    def reset(self):
-        """This will be used primarily for testing purposes."""
-        from asdf.config import get_config
-
-        get_config().reset_extensions()
-
-    @property
-    def resolver(self):
-        return self.extension_list.resolver
-
-
-default_extensions = _DefaultExtensions()
-
-
-def get_default_resolver():
-    """
-    Get the resolver that includes mappings from all installed extensions.
-    """
-    return default_extensions.resolver
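The schema-serving half of the removed legacy API is covered by the resource ``Mapping`` API referenced in
the deleted ``url_mapping`` docstring above.  A hedged sketch of the replacement, where the directory path
and URI prefix are placeholders::

    import asdf
    from asdf.resource import DirectoryResourceMapping

    # Serve the .yaml files under SCHEMA_PATH as asdf:// resources instead of
    # relying on a legacy url_mapping entry.
    SCHEMA_PATH = "/path/to/my_package/schemas"  # hypothetical location
    asdf.get_config().add_resource_mapping(
        DirectoryResourceMapping(SCHEMA_PATH, "asdf://example.com/schemas/")
    )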
diff --git a/asdf/schema.py b/asdf/schema.py
index 96c0f6b48..477fc1000 100644
--- a/asdf/schema.py
+++ b/asdf/schema.py
@@ -16,8 +16,7 @@
 from . import constants, generic_io, reference, tagged, treeutil, util, versioning, yamlutil
 from .config import get_config
-from .exceptions import AsdfWarning
-from .extension import _legacy
+from .exceptions import AsdfDeprecationWarning, AsdfWarning
 from .util import patched_urllib_parse
 
 YAML_SCHEMA_METASCHEMA_ID = "http://stsci.edu/schemas/yaml-schema/draft-01"
@@ -50,6 +49,16 @@ def _type_to_tag(type_):
     return None
 
 
+def _tag_to_uri(input_str):
+    if not input_str.startswith(constants.STSCI_SCHEMA_TAG_BASE):
+        return input_str
+    warnings.warn(
+        "Resolving by tag is deprecated. Use uris instead of tags",
+        AsdfDeprecationWarning,
+    )
+    return f"http://stsci.edu/schemas/asdf{input_str[len(constants.STSCI_SCHEMA_TAG_BASE):]}"
+
+
 def validate_tag(validator, tag_pattern, instance, schema):
     """
     Implements the tag validation directive, which checks the
@@ -230,7 +239,7 @@ def _make_seen_key(self, instance, schema):
 
 @lru_cache
 def _create_validator(validators=YAML_VALIDATORS, visit_repeat_nodes=False):
-    meta_schema = _load_schema_cached(YAML_SCHEMA_METASCHEMA_ID, _legacy.get_default_resolver(), False)
+    meta_schema = _load_schema_cached(YAML_SCHEMA_METASCHEMA_ID, _tag_to_uri, False)
 
     type_checker = mvalidators.Draft4Validator.TYPE_CHECKER.redefine_many(
         {
@@ -282,14 +291,9 @@ def iter_errors(self, instance, *args, **kwargs):
 
             if not self.schema:
                 tag = getattr(instance, "_tag", None)
-                if tag is not None:
-                    if self.serialization_context.extension_manager.handles_tag_definition(tag):
-                        tag_def = self.serialization_context.extension_manager.get_tag_definition(tag)
-                        schema_uris = tag_def.schema_uris
-                    else:
-                        schema_uris = [self.ctx._tag_mapping(tag)]
-                        if schema_uris[0] == tag:
-                            schema_uris = []
+                if tag is not None and self.serialization_context.extension_manager.handles_tag_definition(tag):
+                    tag_def = self.serialization_context.extension_manager.get_tag_definition(tag)
+                    schema_uris = tag_def.schema_uris
 
                     # Must validate against all schema_uris
                     for schema_uri in schema_uris:
@@ -335,6 +339,9 @@ def _load_schema(url):
 
 
 def _make_schema_loader(resolver):
+    if resolver is None:
+        resolver = _tag_to_uri
+
     def load_schema(url):
         # Check if this is a URI provided by the new
         # Mapping API:
@@ -362,7 +369,7 @@ def load_schema(url):
     return load_schema
 
 
-def _make_resolver(url_mapping):
+def _make_jsonschema_refresolver(url_mapping):
     handlers = {}
     schema_loader = _make_schema_loader(url_mapping)
 
@@ -409,9 +416,7 @@ def load_schema(url, resolver=None, resolve_references=False):
     """
     if resolver is None:
-        # We can't just set this as the default in load_schema's definition
-        # because invoking get_default_resolver at import time leads to a circular import.
-        resolver = _legacy.get_default_resolver()
+        resolver = _tag_to_uri
 
     # We want to cache the work that went into constructing the schema, but returning
     # the same object is treacherous, because users who mutate the result will not
@@ -459,6 +464,8 @@ def _safe_resolve(resolver, json_id, uri):
 
 @lru_cache
 def _load_schema_cached(url, resolver, resolve_references):
+    if resolver is None:
+        resolver = _tag_to_uri
     loader = _make_schema_loader(resolver)
     schema, url = loader(url)
 
@@ -540,10 +547,9 @@ def get_validator(
 
     if validators is None:
         validators = util.HashableDict(YAML_VALIDATORS.copy())
-        validators.update(ctx._extension_list.validators)
         validators.update(ctx._extension_manager.validator_manager.get_jsonschema_validators())
 
-    kwargs["resolver"] = _make_resolver(url_mapping)
+    kwargs["resolver"] = _make_jsonschema_refresolver(url_mapping)
 
     # We don't just call validators.validate() directly here, because
     # that validates the schema itself, wasting a lot of time (at the
@@ -639,7 +645,7 @@ def validate(instance, ctx=None, schema=None, validators=None, reading=False, *a
 
         ctx = AsdfFile()
 
-    validator = get_validator({} if schema is None else schema, ctx, validators, ctx._resolver, *args, **kwargs)
+    validator = get_validator({} if schema is None else schema, ctx, validators, None, *args, **kwargs)
     validator.validate(instance)
 
     additional_validators = [_validate_large_literals]
@@ -729,9 +735,9 @@ def applicable_validators(schema):
         applicable_validators = methodcaller("items")
 
     meta_schema_id = schema.get("$schema", YAML_SCHEMA_METASCHEMA_ID)
-    meta_schema = _load_schema_cached(meta_schema_id, _legacy.get_default_resolver(), False)
+    meta_schema = _load_schema_cached(meta_schema_id, _tag_to_uri, False)
 
-    resolver = _make_resolver(_legacy.get_default_resolver())
+    resolver = _make_jsonschema_refresolver(_tag_to_uri)
 
     cls = mvalidators.create(
         meta_schema=meta_schema,
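For callers, the practical effect of these ``asdf/schema.py`` changes is that ``load_schema`` falls back to the
lightweight ``_tag_to_uri`` mapping instead of the legacy resolver chain.  A rough illustration; ``_tag_to_uri``
is private and shown only to demonstrate the mapping, and the exact URIs depend on the installed asdf-standard
resources::

    from asdf import schema

    # Schema URIs resolve directly through the resource manager.
    s = schema.load_schema("http://stsci.edu/schemas/asdf/core/asdf-1.1.0")

    # Legacy stsci.edu tags are still mapped to schema URIs, with a deprecation warning.
    schema._tag_to_uri("tag:stsci.edu:asdf/core/asdf-1.1.0")
    # expected: "http://stsci.edu/schemas/asdf/core/asdf-1.1.0"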
diff --git a/asdf/util.py b/asdf/util.py
index 7d6dc1d69..47b26ffef 100644
--- a/asdf/util.py
+++ b/asdf/util.py
@@ -310,14 +310,6 @@ def resolve_name(name):
     return ret
 
 
-# Kludge to cover up the fact that BuiltinExtension was moved from extension.py
-# to extension/_legacy.py.  Can be removed once BuiltinExtension is dropped
-# in asdf 3.0.
-_CLASS_NAME_OVERRIDES = {
-    "asdf.extension._legacy.BuiltinExtension": "asdf.extension.BuiltinExtension",
-}
-
-
 def get_class_name(obj, instance=True):
     """
     Given a class or instance of a class, returns a string representing the
@@ -332,8 +324,7 @@ def get_class_name(obj, instance=True):
         Indicates whether given object is an instance of the class to be named
     """
     typ = type(obj) if instance else obj
-    class_name = f"{typ.__module__}.{typ.__qualname__}"
-    return _CLASS_NAME_OVERRIDES.get(class_name, class_name)
+    return f"{typ.__module__}.{typ.__qualname__}"
 
 
 def minversion(module, version, inclusive=True):
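With the override table gone, ``get_class_name`` is a plain formatting helper.  A quick illustration; the
``Thing`` class is just an example::

    from asdf.util import get_class_name


    class Thing:
        pass


    get_class_name(Thing, instance=False)  # "__main__.Thing" when defined in a script
    get_class_name(Thing())                # same result for an instance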
diff --git a/asdf/yamlutil.py b/asdf/yamlutil.py
index 0fd20b986..67b681ef8 100644
--- a/asdf/yamlutil.py
+++ b/asdf/yamlutil.py
@@ -10,7 +10,7 @@
 from .exceptions import AsdfConversionWarning
 from .extension._serialization_context import BlockAccess
 from .tags.core import AsdfObject
-from .versioning import _yaml_base_loader, split_tag_version
+from .versioning import _yaml_base_loader
 
 __all__ = ["custom_tree_to_tagged_tree", "tagged_tree_to_custom_tree"]
 
@@ -220,7 +220,6 @@ def custom_tree_to_tagged_tree(tree, ctx, _serialization_context=None):
         _serialization_context = ctx._create_serialization_context(BlockAccess.WRITE)
 
     extension_manager = _serialization_context.extension_manager
-    version_string = str(_serialization_context.version)
 
     def _convert_obj(obj, converter):
         tag = converter.select_tag(obj, _serialization_context)
@@ -290,16 +289,6 @@ def _walker(obj):
             converters_cache[typ] = lambda obj, _converter=converter: _convert_obj(obj, _converter)
             return _convert_obj(obj, converter)
 
-        tag = ctx._type_index.from_custom_type(
-            typ,
-            version_string,
-            _serialization_context=_serialization_context,
-        )
-
-        if tag is not None:
-            converters_cache[typ] = lambda obj, _tag=tag: _tag.to_tree_tagged(obj, ctx)
-            return tag.to_tree_tagged(obj, ctx)
-
         converters_cache[typ] = lambda obj: obj
         return obj
 
@@ -340,40 +329,11 @@ def _walker(node):
             _serialization_context._mark_extension_used(converter.extension)
             return obj
 
-        tag_type = ctx._type_index.from_yaml_tag(ctx, tag, _serialization_context=_serialization_context)
-        # This means the tag did not correspond to any type in our type index.
-        if tag_type is None:
-            if not ctx._ignore_unrecognized_tag:
-                warnings.warn(
-                    f"{tag} is not recognized, converting to raw Python data structure",
-                    AsdfConversionWarning,
-                )
-            return node
-
-        tag_name, tag_version = split_tag_version(tag)
-        # This means that there is an explicit description of versions that are
-        # compatible with the associated tag class implementation, but the
-        # version we found does not fit that description.
-        if tag_type.incompatible_version(tag_version):
+        if not ctx._ignore_unrecognized_tag:
             warnings.warn(
-                f"Version {tag_version} of {tag_name} is not compatible with any existing tag implementations",
+                f"{tag} is not recognized, converting to raw Python data structure",
                 AsdfConversionWarning,
             )
-            return node
-
-        # If a tag class does not explicitly list compatible versions, then all
-        # versions of the corresponding schema are assumed to be compatible.
-        # Therefore we need to check to make sure whether the conversion is
-        # actually successful, and just return a raw Python data type if it is
-        # not.
-        try:
-            return tag_type.from_tree_tagged(node, ctx)
-        except TypeError as err:
-            warnings.warn(
-                f"Failed to convert {tag} to custom type (detail: {err}).  Using raw Python data structure instead",
-                AsdfConversionWarning,
-            )
-
         return node
 
     return treeutil.walk_and_modify(
diff --git a/docs/asdf/developer_overview.rst b/docs/asdf/developer_overview.rst
index 057d13e7e..1e2e980db 100644
--- a/docs/asdf/developer_overview.rst
+++ b/docs/asdf/developer_overview.rst
@@ -434,7 +434,7 @@ that.)
 **_make_schema_loader:** Defines the function load_schema using the provided
 resolver and _load_schema.
 
-**_make_resolver:** Sets the schema loader for http, https, file, tag using a
+**_make_jsonschema_refresolver:** Sets the schema loader for http, https, file, tag using a
 dictionary where these access methods are the keys and the schema loader
 returning only the schema (and not the uri).  These all appear to use the same
 schema loader.
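Since the renamed helper is internal, the public path in is unchanged: ``asdf.schema.validate`` now builds its
validator without an explicit URL mapping (see the ``get_validator`` hunk above).  A minimal usage sketch, with
a made-up instance and schema::

    from asdf import schema

    schema.validate(
        {"name": "example"},
        schema={"type": "object", "properties": {"name": {"type": "string"}}},
    )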