Merge pull request #1688 from braingram/cleanup_util
Cleanup `asdf.util`
braingram authored Dec 5, 2023
2 parents 97fc5df + a699ee9 commit 0231fe2
Showing 15 changed files with 149 additions and 57 deletions.
9 changes: 9 additions & 0 deletions CHANGES.rst
@@ -1,3 +1,12 @@
3.1.0 (unreleased)
------------------

The ASDF Standard is at v1.6.0
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Clean up ``asdf.util``, deprecating ``human_list``, ``resolve_name``,
  ``minversion`` and ``iter_subclasses`` [#1688]

3.0.1 (2023-10-30)
------------------

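For context, the deprecations listed in the changelog entry above surface as `AsdfDeprecationWarning` at call time (the tests added below pin the exact messages). A minimal sketch of what a caller sees, assuming an asdf release containing this change:

```python
import warnings

import asdf
from asdf.exceptions import AsdfDeprecationWarning

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    asdf.util.human_list(["vanilla", "chocolate"])  # deprecated helper

# The call still works, but it now emits an AsdfDeprecationWarning.
assert any(issubclass(w.category, AsdfDeprecationWarning) for w in caught)
```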
4 changes: 2 additions & 2 deletions asdf/_block/external.py
@@ -40,7 +40,7 @@ def load(self, base_uri, uri, memmap=False, validate_checksums=False):
) as af:
blk = af._blocks.blocks[0]
if memmap and blk.header["compression"] == b"\0\0\0\0":
parsed_url = util.patched_urllib_parse.urlparse(resolved_uri)
parsed_url = util._patched_urllib_parse.urlparse(resolved_uri)
if parsed_url.scheme == "file":
# deal with leading slash for windows file://
filename = urllib.request.url2pathname(parsed_url.path)
@@ -58,7 +58,7 @@ def clear(self):

def relative_uri_for_index(uri, index):
# get the os-native separated path for this uri
path = util.patched_urllib_parse.urlparse(uri).path
path = util._patched_urllib_parse.urlparse(uri).path
dirname, filename = os.path.split(path)
filename = os.path.splitext(filename)[0] + f"{index:04d}.asdf"
return filename
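The rename from `patched_urllib_parse` to `_patched_urllib_parse` makes the patched parser private. It is a separate copy of `urllib.parse` with the `asdf` scheme registered for relative resolution, leaving the stock module untouched (see `test_patched_urllib_parse` in the test changes below). A rough sketch of how such a copy can be constructed; `_make_patched_parse` is a hypothetical name, not asdf's actual code:

```python
import importlib.util
import urllib.parse


def _make_patched_parse(scheme="asdf"):
    """Load a private copy of urllib.parse and register an extra URL scheme on it."""
    spec = importlib.util.find_spec("urllib.parse")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    module.uses_relative.append(scheme)  # allow relative resolution for asdf://
    module.uses_netloc.append(scheme)    # treat asdf:// URIs as having a netloc
    return module


patched = _make_patched_parse()
assert "asdf" in patched.uses_relative
assert "asdf" not in urllib.parse.uses_relative  # the real module is untouched
```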
4 changes: 2 additions & 2 deletions asdf/_block/io.py
@@ -15,7 +15,7 @@

from .exceptions import BlockIndexError

BLOCK_HEADER = util.BinaryStruct(
BLOCK_HEADER = util._BinaryStruct(
[
("flags", "I"),
("compression", "4s"),
@@ -83,7 +83,7 @@ def read_block_header(fd, offset=None):
-------
header : dict
Dictionary containing the read ASDF header as parsed by the
`BLOCK_HEADER` `asdf.util.BinaryStruct`.
`BLOCK_HEADER` `asdf.util._BinaryStruct`.

Raises
------
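`_BinaryStruct` (the former public `BinaryStruct`) pairs `struct` format codes with field names so block headers can be packed and unpacked as dicts. A self-contained sketch of the concept, assuming big-endian layout (class name and details here are illustrative, not asdf's exact API):

```python
import struct


class BinaryStructSketch:
    """Pack/unpack a fixed binary layout by field name (illustrative only)."""

    def __init__(self, descr):
        self._names = [name for name, _ in descr]
        # ">" = big-endian, matching the ASDF block header layout.
        self._struct = struct.Struct(">" + "".join(fmt for _, fmt in descr))

    def pack(self, **fields):
        return self._struct.pack(*(fields[name] for name in self._names))

    def unpack(self, buff):
        return dict(zip(self._names, self._struct.unpack(buff)))


header = BinaryStructSketch([("flags", "I"), ("compression", "4s")])
packed = header.pack(flags=1, compression=b"zlib")
assert header.unpack(packed) == {"flags": 1, "compression": b"zlib"}
```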
21 changes: 21 additions & 0 deletions asdf/_tests/test_deprecated.py
@@ -2,6 +2,7 @@

import pytest

import asdf
from asdf.exceptions import AsdfDeprecationWarning


@@ -15,3 +16,23 @@ def test_asdf_stream_deprecation():
def test_asdf_asdf_SerializationContext_import_deprecation():
with pytest.warns(AsdfDeprecationWarning, match="importing SerializationContext from asdf.asdf"):
from asdf.asdf import SerializationContext # noqa: F401


def test_asdf_util_human_list_deprecation():
with pytest.warns(AsdfDeprecationWarning, match="asdf.util.human_list is deprecated"):
asdf.util.human_list("a")


def test_asdf_util_resolve_name_deprecation():
with pytest.warns(AsdfDeprecationWarning, match="asdf.util.resolve_name is deprecated"):
asdf.util.resolve_name("asdf.AsdfFile")


def test_asdf_util_minversion_deprecation():
with pytest.warns(AsdfDeprecationWarning, match="asdf.util.minversion is deprecated"):
asdf.util.minversion("yaml", "3.1")


def test_asdf_util_iter_subclasses_deprecation():
with pytest.warns(AsdfDeprecationWarning, match="asdf.util.iter_subclasses is deprecated"):
list(asdf.util.iter_subclasses(asdf.AsdfFile))
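These tests only pin the warning text. For reference, the usual shape of such a deprecation (a sketch of the general pattern; the body is taken from the `_human_list` copy added in `asdf/commands/diff.py` below, not necessarily this commit's exact code):

```python
import warnings

from asdf.exceptions import AsdfDeprecationWarning


def human_list(line, separator="and"):
    """Deprecated: format a sequence of strings for human readability."""
    warnings.warn("asdf.util.human_list is deprecated", AsdfDeprecationWarning)
    if len(line) == 1:
        return line[0]
    return ", ".join(line[:-1]) + " " + separator + " " + line[-1]
```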
28 changes: 16 additions & 12 deletions asdf/_tests/test_util.py
@@ -1,8 +1,10 @@
import io
import warnings

import pytest

from asdf import generic_io, util
from asdf.exceptions import AsdfDeprecationWarning


def test_is_primitive():
@@ -35,12 +37,12 @@ def test_get_class_name():


def test_patched_urllib_parse():
assert "asdf" in util.patched_urllib_parse.uses_relative
assert "asdf" in util.patched_urllib_parse.uses_netloc
assert "asdf" in util._patched_urllib_parse.uses_relative
assert "asdf" in util._patched_urllib_parse.uses_netloc

import urllib.parse

assert urllib.parse is not util.patched_urllib_parse
assert urllib.parse is not util._patched_urllib_parse
assert "asdf" not in urllib.parse.uses_relative
assert "asdf" not in urllib.parse.uses_netloc

@@ -103,12 +105,14 @@ def test_minversion():

good_versions = ["1.16", "1.16.1", "1.16.0.dev", "1.16dev"]
bad_versions = ["100000", "100000.2rc1"]
for version in good_versions:
assert util.minversion(np, version)
assert util.minversion("numpy", version)
for version in bad_versions:
assert not util.minversion(np, version)
assert not util.minversion("numpy", version)

assert util.minversion(yaml, "3.1")
assert util.minversion("yaml", "3.1")
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "asdf.util.minversion", AsdfDeprecationWarning)
for version in good_versions:
assert util.minversion(np, version)
assert util.minversion("numpy", version)
for version in bad_versions:
assert not util.minversion(np, version)
assert not util.minversion("numpy", version)

assert util.minversion(yaml, "3.1")
assert util.minversion("yaml", "3.1")
31 changes: 29 additions & 2 deletions asdf/commands/diff.py
@@ -33,7 +33,6 @@
import asdf
from asdf.extension._serialization_context import BlockAccess
from asdf.tagged import Tagged
from asdf.util import human_list

from .main import Command

@@ -256,6 +255,34 @@ def _load_array(asdf_file, array_dict):
return conv.from_yaml_tree(array_dict, array_dict._tag, sctx)


def _human_list(line, separator="and"):
"""
Formats a list for human readability.

Parameters
----------
line : sequence
    A sequence of strings
separator : string, optional
    The word to use between the last two entries. Default:
    ``"and"``.

Returns
-------
formatted_list : string

Examples
--------
>>> _human_list(["vanilla", "strawberry", "chocolate"], "or")
'vanilla, strawberry or chocolate'
"""
if len(line) == 1:
return line[0]

return ", ".join(line[:-1]) + " " + separator + " " + line[-1]


def compare_ndarrays(diff_ctx, array0, array1, keys):
"""Compares two ndarray objects"""
if isinstance(array0, list):
@@ -278,7 +305,7 @@ def compare_ndarrays(diff_ctx, array0, array1, keys):
differences.append("contents")

if differences:
msg = f"ndarrays differ by {human_list(differences)}"
msg = f"ndarrays differ by {_human_list(differences)}"
print_in_tree(diff_ctx, keys, msg, False, ignore_lwl=True)
print_in_tree(diff_ctx, keys, msg, True, ignore_lwl=True)

2 changes: 1 addition & 1 deletion asdf/commands/main.py
@@ -37,7 +37,7 @@ def help_(args):
help_parser = subparsers.add_parser("help", help="Display usage information")
help_parser.set_defaults(func=help_)

commands = {x.__name__: x for x in util.iter_subclasses(Command)}
commands = {x.__name__: x for x in util._iter_subclasses(Command)}

for command in command_order:
commands[str(command)].setup_arguments(subparsers)
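`_iter_subclasses` (formerly public) recursively walks a class's subclass tree; `main.py` uses it to discover every registered `Command`. A minimal re-implementation of the idea, under the assumption that it is a plain depth-first traversal:

```python
def iter_subclasses_sketch(cls):
    """Recursively yield all subclasses of ``cls`` (depth-first)."""
    for sub in cls.__subclasses__():
        yield sub
        yield from iter_subclasses_sketch(sub)


class Base: ...
class A(Base): ...
class B(A): ...

assert list(iter_subclasses_sketch(Base)) == [A, B]
```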
2 changes: 1 addition & 1 deletion asdf/core/_converters/complex.py
@@ -14,7 +14,7 @@
class ComplexConverter(Converter):
tags = ["tag:stsci.edu:asdf/core/complex-1.0.0"]

types = [*list(util.iter_subclasses(np.complexfloating)), complex]
types = [*list(util._iter_subclasses(np.complexfloating)), complex]

def to_yaml_tree(self, obj, tag, ctx):
return str(obj)
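For reference, the subclass walk here gathers numpy's concrete complex scalar types so the converter claims all of them. A quick check of what it picks up (the exact set depends on the numpy version and platform):

```python
import numpy as np

# Direct subclasses of the abstract complexfloating scalar type;
# typically numpy.complex64 and numpy.complex128 (plus clongdouble).
print(np.complexfloating.__subclasses__())
```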
16 changes: 8 additions & 8 deletions asdf/generic_io.py
@@ -22,7 +22,7 @@
from . import util
from .exceptions import DelimiterNotFoundError
from .extern import atomicfile
from .util import patched_urllib_parse
from .util import _patched_urllib_parse

__all__ = ["get_file", "get_uri", "resolve_uri", "relative_uri"]

Expand Down Expand Up @@ -69,8 +69,8 @@ def resolve_uri(base, uri):
"""
if base is None:
base = ""
resolved = patched_urllib_parse.urljoin(base, uri)
parsed = patched_urllib_parse.urlparse(resolved)
resolved = _patched_urllib_parse.urljoin(base, uri)
parsed = _patched_urllib_parse.urlparse(resolved)
if parsed.path != "" and not parsed.path.startswith("/"):
msg = "Resolved to relative URL"
raise ValueError(msg)
@@ -81,8 +81,8 @@ def relative_uri(source, target):
"""
Make a relative URI from source to target.
"""
su = patched_urllib_parse.urlparse(source)
tu = patched_urllib_parse.urlparse(target)
su = _patched_urllib_parse.urlparse(source)
tu = _patched_urllib_parse.urlparse(target)
extra = list(tu[3:])
relative = None
if tu[0] == "" and tu[1] == "":
@@ -98,7 +98,7 @@
if relative == ".":
relative = ""

return patched_urllib_parse.urlunparse(["", "", relative, *extra])
return _patched_urllib_parse.urlunparse(["", "", relative, *extra])


class _TruncatedReader:
Expand Down Expand Up @@ -187,7 +187,7 @@ def read(self, nbytes=None):
return content


class GenericFile(metaclass=util.InheritDocstrings):
class GenericFile(metaclass=util._InheritDocstrings):
"""
Base class for an abstraction layer around a number of different
file-like types. Each of its subclasses handles a particular kind
@@ -1094,7 +1094,7 @@ def get_file(init, mode="r", uri=None, close=False):
return GenericWrapper(init)

if isinstance(init, (str, pathlib.Path)):
parsed = patched_urllib_parse.urlparse(str(init))
parsed = _patched_urllib_parse.urlparse(str(init))
if parsed.scheme in ["http", "https"]:
if "w" in mode:
msg = "HTTP connections can not be opened for writing"
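Because the patched parser registers the `asdf://` scheme in `uses_relative`/`uses_netloc`, these public helpers treat `asdf://` URIs the way the standard library treats `http://`. A usage sketch; the expected results follow ordinary `urljoin` semantics and are assumptions, not outputs taken from this commit's test suite:

```python
from asdf.generic_io import relative_uri, resolve_uri

base = "asdf://example.com/schemas/base-1.0.0"
resolve_uri(base, "other-1.0.0")
# -> "asdf://example.com/schemas/other-1.0.0"
relative_uri(base, "asdf://example.com/schemas/other-1.0.0")
# -> "other-1.0.0"
```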
6 changes: 3 additions & 3 deletions asdf/reference.py
@@ -12,7 +12,7 @@
import numpy as np

from . import generic_io, treeutil, util
from .util import patched_urllib_parse
from .util import _patched_urllib_parse

__all__ = ["resolve_fragment", "Reference", "find_references", "resolve_references", "make_reference"]

@@ -22,7 +22,7 @@ def resolve_fragment(tree, pointer):
Resolve a JSON Pointer within the tree.
"""
pointer = pointer.lstrip("/")
parts = patched_urllib_parse.unquote(pointer).split("/") if pointer else []
parts = _patched_urllib_parse.unquote(pointer).split("/") if pointer else []

for part in parts:
part_ = part.replace("~1", "/").replace("~0", "~")
@@ -57,7 +57,7 @@ def _get_target(self, **kwargs):
base_uri = self._asdffile().uri
uri = generic_io.resolve_uri(base_uri, self._uri)
asdffile = self._asdffile().open_external(uri, **kwargs)
parts = patched_urllib_parse.urlparse(self._uri)
parts = _patched_urllib_parse.urlparse(self._uri)
fragment = parts.fragment
self._target = resolve_fragment(asdffile.tree, fragment)
return self._target
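The `~1`/`~0` replacements in `resolve_fragment` are RFC 6901 JSON Pointer unescaping. A self-contained sketch mirroring the loop shown above (error handling omitted):

```python
import urllib.parse


def resolve_fragment_sketch(tree, pointer):
    """Resolve an RFC 6901 JSON Pointer within a nested dict/list tree."""
    pointer = pointer.lstrip("/")
    parts = urllib.parse.unquote(pointer).split("/") if pointer else []
    for part in parts:
        part = part.replace("~1", "/").replace("~0", "~")  # RFC 6901 unescaping
        if isinstance(tree, list):
            tree = tree[int(part)]
        else:
            tree = tree[part]
    return tree


tree = {"data": {"points": [1, 2, 3]}, "a/b": "escaped"}
assert resolve_fragment_sketch(tree, "/data/points/1") == 2
assert resolve_fragment_sketch(tree, "/a~1b") == "escaped"
```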
4 changes: 2 additions & 2 deletions asdf/schema.py
@@ -17,7 +17,7 @@
from . import constants, generic_io, reference, tagged, treeutil, util, versioning, yamlutil
from .config import get_config
from .exceptions import AsdfDeprecationWarning, AsdfWarning
from .util import patched_urllib_parse
from .util import _patched_urllib_parse

YAML_SCHEMA_METASCHEMA_ID = "http://stsci.edu/schemas/yaml-schema/draft-01"

@@ -381,7 +381,7 @@ def get_schema(url):

# Supplying our own implementation of urljoin_cache
# allows asdf:// URIs to be resolved correctly.
urljoin_cache = lru_cache(1024)(patched_urllib_parse.urljoin)
urljoin_cache = lru_cache(1024)(_patched_urllib_parse.urljoin)

# We set cache_remote=False here because we do the caching of
# remote schemas here in `load_schema`, so we don't need
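The cached join is simply `functools.lru_cache` wrapped around the patched `urljoin`, so repeated reference resolution against the same base is cheap. The equivalent with the stock parser:

```python
from functools import lru_cache
from urllib.parse import urljoin

urljoin_cache = lru_cache(1024)(urljoin)
urljoin_cache("http://stsci.edu/schemas/yaml-schema/", "draft-01")
urljoin_cache("http://stsci.edu/schemas/yaml-schema/", "draft-01")
print(urljoin_cache.cache_info())  # second call is a cache hit
```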
[Diffs for the remaining 4 changed files not shown.]
