From a1f659f96ae5ac6f4a40c5fa8b8107bcd5459789 Mon Sep 17 00:00:00 2001 From: libretto Date: Sat, 9 Nov 2024 20:22:41 +0200 Subject: [PATCH] replace unions way with avro library --- src/karapace/schema_models.py | 59 +++--- tests/test_avro_references.py | 356 ++++++++++++++++++++++++++++++++++ tests/unit/test_avro_merge.py | 76 -------- 3 files changed, 379 insertions(+), 112 deletions(-) create mode 100644 tests/test_avro_references.py delete mode 100644 tests/unit/test_avro_merge.py diff --git a/src/karapace/schema_models.py b/src/karapace/schema_models.py index b56fcc123..7bfd7c4d8 100644 --- a/src/karapace/schema_models.py +++ b/src/karapace/schema_models.py @@ -5,7 +5,8 @@ from __future__ import annotations from avro.errors import SchemaParseException -from avro.schema import parse as avro_parse, Schema as AvroSchema +from avro.name import Names as AvroNames +from avro.schema import make_avsc_object, parse as avro_parse, Schema as AvroSchema from collections.abc import Collection, Mapping, Sequence from dataclasses import dataclass from jsonschema import Draft7Validator @@ -29,8 +30,8 @@ from karapace.utils import assert_never, json_decode, json_encode, JSONDecodeError from typing import Any, cast, Final, final -import avro.schema import hashlib +import json import logging import re @@ -198,28 +199,17 @@ def schema(self) -> Draft7Validator | AvroSchema | ProtobufSchema: return parsed_typed_schema.schema -class AvroMerge: +class AvroResolver: def __init__(self, schema_str: str, dependencies: Mapping[str, Dependency] | None = None): self.schema_str = json_encode(json_decode(schema_str), compact=True, sort_keys=True) self.dependencies = dependencies self.unique_id = 0 self.regex = re.compile(r"^\s*\[") - def union_safe_schema_str(self, schema_str: str) -> str: - # in case we meet union - we use it as is - - base_schema = ( - f'{{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_{self.unique_id}___",' - f'"type": "record", "fields": [{{"name": "name", "type":' - ) - if self.regex.match(schema_str): - return f"{base_schema} {schema_str}}}]}}" - return f"{base_schema} [{schema_str}]}}]}}" - - def builder(self, schema_str: str, dependencies: Mapping[str, Dependency] | None = None) -> str: + def builder(self, schema_str: str, dependencies: Mapping[str, Dependency] | None = None) -> list: """To support references in AVRO we iteratively merge all referenced schemas with current schema""" stack: list[tuple[str, Mapping[str, Dependency] | None]] = [(schema_str, dependencies)] - merged_schemas = [] + merge: list = [] while stack: current_schema_str, current_dependencies = stack.pop() @@ -229,12 +219,15 @@ def builder(self, schema_str: str, dependencies: Mapping[str, Dependency] | None stack.append((dependency.schema.schema_str, dependency.schema.dependencies)) else: self.unique_id += 1 - merged_schemas.append(self.union_safe_schema_str(current_schema_str)) + merge.append(current_schema_str) - return ",\n".join(merged_schemas) + return merge - def wrap(self) -> str: - return "[\n" + self.builder(self.schema_str, self.dependencies) + "\n]" + def resolve(self) -> list: + """Resolve the given ``schema_str`` with ``dependencies`` to a list of schemas + sorted in an order where all referenced schemas are located prior to their referrers. 
+ """ + return self.builder(self.schema_str, self.dependencies) def parse( @@ -249,34 +242,30 @@ def parse( ) -> ParsedTypedSchema: if schema_type not in [SchemaType.AVRO, SchemaType.JSONSCHEMA, SchemaType.PROTOBUF]: raise InvalidSchema(f"Unknown parser {schema_type} for {schema_str}") - parsed_schema_result: Draft7Validator | AvroSchema | ProtobufSchema parsed_schema: Draft7Validator | AvroSchema | ProtobufSchema if schema_type is SchemaType.AVRO: try: if dependencies: - wrapped_schema_str = AvroMerge(schema_str, dependencies).wrap() + schemas_list = AvroResolver(schema_str, dependencies).resolve() + names = AvroNames(validate_names=validate_avro_names) + merged_schema = None + for schema in schemas_list: + # Merge dep with all previously merged ones + merged_schema = make_avsc_object(json.loads(schema), names) + merged_schema_str = str(merged_schema) else: - wrapped_schema_str = schema_str + merged_schema_str = schema_str parsed_schema = parse_avro_schema_definition( - wrapped_schema_str, + merged_schema_str, validate_enum_symbols=validate_avro_enum_symbols, validate_names=validate_avro_names, ) - if dependencies: - if isinstance(parsed_schema, avro.schema.UnionSchema): - parsed_schema_result = parsed_schema.schemas[-1].fields[0].type.schemas[-1] - - else: - raise InvalidSchema - else: - parsed_schema_result = parsed_schema return ParsedTypedSchema( schema_type=schema_type, schema_str=schema_str, - schema=parsed_schema_result, + schema=parsed_schema, references=references, dependencies=dependencies, - schema_wrapped=parsed_schema, ) except (SchemaParseException, JSONDecodeError, TypeError) as e: raise InvalidSchema from e @@ -346,10 +335,8 @@ def __init__( schema: Draft7Validator | AvroSchema | ProtobufSchema, references: Sequence[Reference] | None = None, dependencies: Mapping[str, Dependency] | None = None, - schema_wrapped: Draft7Validator | AvroSchema | ProtobufSchema | None = None, ) -> None: self._schema_cached: Draft7Validator | AvroSchema | ProtobufSchema | None = schema - self.schema_wrapped = schema_wrapped super().__init__( schema_type=schema_type, schema_str=schema_str, diff --git a/tests/test_avro_references.py b/tests/test_avro_references.py new file mode 100644 index 000000000..af3aea0a5 --- /dev/null +++ b/tests/test_avro_references.py @@ -0,0 +1,356 @@ +""" +Copyright (c) 2023 Aiven Ltd +See LICENSE for details +""" +from karapace.dependency import Dependency +from karapace.schema_models import AvroResolver, SchemaType, ValidatedTypedSchema +from karapace.schema_references import Reference +from karapace.typing import Subject, Version + +import pytest +import textwrap + + +def create_validated_schema(schema_str: str, dependencies=None) -> ValidatedTypedSchema: + """Helper function to create a validated typed schema.""" + return ValidatedTypedSchema(schema_str=schema_str, schema_type="AVRO", dependencies=dependencies or {}) + + +@pytest.fixture(name="base_schema") +def fixture_base_schema(): + return '{"type": "record", "name": "BaseRecord", "fields": [{"name": "field1", "type": "string"}]}' + + +@pytest.fixture(name="dependency_schema") +def fixture_dependency_schema(): + return '{"type": "record", "name": "DependencyRecord", "fields": [{"name": "depField", "type": "int"}]}' + + +@pytest.fixture(name="another_dependency_schema") +def fixture_another_dependency_schema(): + return '{"type": "record", "name": "AnotherDependency", "fields": [{"name": "anotherField", "type": "boolean"}]}' + + +def test_resolver_without_dependencies(base_schema): + resolver = 
AvroResolver(schema_str=base_schema) + resolved_schemas = resolver.resolve() + assert resolved_schemas == [base_schema], "Expected single schema in resolved list without dependencies" + + +def test_resolver_with_single_dependency(base_schema, dependency_schema): + dependency = Dependency( + name="Dependency1", + subject=Subject("TestSubject"), + version=Version(1), + target_schema=create_validated_schema(dependency_schema), + ) + dependencies = {"Dependency1": dependency} + resolver = AvroResolver(schema_str=base_schema, dependencies=dependencies) + resolved_schemas = resolver.resolve() + assert resolved_schemas == [dependency_schema, base_schema], "Expected dependency to be resolved before base schema" + + +def test_resolver_with_multiple_dependencies(base_schema, dependency_schema, another_dependency_schema): + dependency1 = Dependency( + name="Dependency1", + subject=Subject("TestSubject1"), + version=Version(1), + target_schema=create_validated_schema(dependency_schema), + ) + dependency2 = Dependency( + name="Dependency2", + subject=Subject("TestSubject2"), + version=Version(1), + target_schema=create_validated_schema(another_dependency_schema), + ) + dependencies = {"Dependency1": dependency1, "Dependency2": dependency2} + resolver = AvroResolver(schema_str=base_schema, dependencies=dependencies) + resolved_schemas = resolver.resolve() + + # Validate both dependencies appear before the base schema, without assuming their specific order + assert dependency_schema in resolved_schemas + assert another_dependency_schema in resolved_schemas + assert resolved_schemas[-1] == base_schema, "Base schema should be the last in the resolved list" + + +def test_builder_unique_id_increment(base_schema, dependency_schema): + dependency = Dependency( + name="Dependency1", + subject=Subject("TestSubject"), + version=Version(1), + target_schema=create_validated_schema(dependency_schema), + ) + dependencies = {"Dependency1": dependency} + resolver = AvroResolver(schema_str=base_schema, dependencies=dependencies) + resolver.builder(base_schema, dependencies) + assert resolver.unique_id == 2, "Unique ID should be incremented for each processed schema" + + +def test_resolver_with_nested_dependencies(base_schema, dependency_schema, another_dependency_schema): + # Create nested dependency structure + nested_dependency = Dependency( + name="NestedDependency", + subject=Subject("NestedSubject"), + version=Version(1), + target_schema=create_validated_schema(another_dependency_schema), + ) + dependency_with_nested = Dependency( + name="Dependency1", + subject=Subject("TestSubject"), + version=Version(1), + target_schema=create_validated_schema(dependency_schema, dependencies={"NestedDependency": nested_dependency}), + ) + dependencies = {"Dependency1": dependency_with_nested} + resolver = AvroResolver(schema_str=base_schema, dependencies=dependencies) + resolved_schemas = resolver.resolve() + + # Ensure all schemas are resolved in the correct order + assert another_dependency_schema in resolved_schemas + assert dependency_schema in resolved_schemas + assert resolved_schemas[-1] == base_schema, "Base schema should be the last in the resolved list" + assert resolved_schemas.index(another_dependency_schema) < resolved_schemas.index( + dependency_schema + ), "Nested dependency should be resolved before its parent" + + +def test_avro_reference() -> None: + country_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Country", + 
"namespace": "com.netapp", + "fields": [{"name": "name", "type": "string"}, {"name": "code", "type": "string"}] + } + """ + ), + ) + address_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Address", + "namespace": "com.netapp", + "fields": [ + {"name": "street", "type": "string"}, + {"name": "city", "type": "string"}, + {"name": "postalCode", "type": "string"}, + {"name": "country", "type": "Country"} + ] + } + """ + ), + references=[Reference(name="country.avsc", subject=Subject("country"), version=Version(1))], + dependencies={ + "country": Dependency( + name="country", + subject=Subject("country"), + version=Version(1), + target_schema=country_schema, + ), + }, + ) + + # Check that the reference schema (Country) has been inlined + assert address_schema.schema == textwrap.dedent( + """\ + { + "type": "record", + "name": "Address", + "namespace": "com.netapp", + "fields": [ + { + "type": "string", + "name": "street" + }, + { + "type": "string", + "name": "city" + }, + { + "type": "string", + "name": "postalCode" + }, + { + "type": { + "type": "record", + "name": "Country", + "namespace": "com.netapp", + "fields": [ + { + "type": "string", + "name": "name" + }, + { + "type": "string", + "name": "code" + } + ] + }, + "name": "country" + } + ] + } + """ + ) + + +def test_avro_reference2() -> None: + # country.avsc + country_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Country", + "namespace": "com.netapp", + "fields": [{"name": "name", "type": "string"}, {"name": "code", "type": "string"}] + } + """ + ), + ) + + # address.avsc + address_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Address", + "namespace": "com.netapp", + "fields": [ + {"name": "street", "type": "string"}, + {"name": "city", "type": "string"}, + {"name": "postalCode", "type": "string"}, + {"name": "country", "type": "Country"} + ] + } + """ + ), + references=[Reference(name="country.avsc", subject=Subject("country"), version=Version(1))], + dependencies={ + "country": Dependency( + name="country", + subject=Subject("country"), + version=Version(1), + target_schema=country_schema, + ), + }, + ) + + # job.avsc + job_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Job", + "namespace": "com.netapp", + "fields": [ + {"name": "title", "type": "string"}, + {"name": "salary", "type": "double"} + ] + } + """ + ), + ) + + # person.avsc + person_schema = ValidatedTypedSchema.parse( + schema_type=SchemaType.AVRO, + schema_str=textwrap.dedent( + """\ + { + "type": "record", + "name": "Person", + "namespace": "com.netapp", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "age", "type": "int"}, + {"name": "address", "type": "Address"}, + {"name": "job", "type": "Job"} + ] + } + """ + ), + references=[ + Reference(name="address.avsc", subject=Subject("address"), version=Version(1)), + Reference(name="job.avsc", subject=Subject("job"), version=Version(1)), + ], + dependencies={ + "address": Dependency( + name="address", + subject=Subject("address"), + version=Version(1), + target_schema=address_schema, + ), + "job": Dependency( + name="job", + subject=Subject("job"), + version=Version(1), + target_schema=job_schema, + ), + }, + ) + + # Check that the Address and 
Job schemas are correctly inlined within the Person schema + expected_schema = textwrap.dedent( + """\ + { + "type": "record", + "name": "Person", + "namespace": "com.netapp", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "age", "type": "int"}, + { + "name": "address", + "type": { + "type": "record", + "name": "Address", + "namespace": "com.netapp", + "fields": [ + {"name": "street", "type": "string"}, + {"name": "city", "type": "string"}, + {"name": "postalCode", "type": "string"}, + { + "name": "country", + "type": { + "type": "record", + "name": "Country", + "namespace": "com.netapp", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "code", "type": "string"} + ] + } + } + ] + } + }, + { + "name": "job", + "type": { + "type": "record", + "name": "Job", + "namespace": "com.netapp", + "fields": [ + {"name": "title", "type": "string"}, + {"name": "salary", "type": "double"} + ] + } + } + ] + } + """ + ) + + # Check that the reference schemas (Address and Job, including nested Country) have been correctly inlined + assert person_schema.schema == expected_schema diff --git a/tests/unit/test_avro_merge.py b/tests/unit/test_avro_merge.py deleted file mode 100644 index 553afdd0f..000000000 --- a/tests/unit/test_avro_merge.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -karapace - Unit Test of AvroMerge class - -Copyright (c) 2023 Aiven Ltd -See LICENSE for details -""" - - -from karapace.schema_models import AvroMerge -from karapace.utils import json_decode, json_encode -from unittest.mock import MagicMock - -import pytest - - -class TestAvroMerge: - @pytest.fixture - def avro_merge(self): - schema_str = '{"type": "record", "name": "Test", "fields": [{"name": "field1", "type": "string"}]}' - dependencies = {"dependency1": MagicMock(schema=MagicMock(schema_str='{"type": "string"}', dependencies=None))} - return AvroMerge(schema_str, dependencies) - - def test_init(self, avro_merge): - assert avro_merge.schema_str == json_encode(json_decode(avro_merge.schema_str), compact=True, sort_keys=True) - assert avro_merge.unique_id == 0 - - def test_union_safe_schema_str_no_union(self, avro_merge): - result = avro_merge.union_safe_schema_str('{"type": "string"}') - expected = ( - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_0___","type": "record", "fields": [{"name": "name", ' - '"type": [{"type": "string"}]}]}' - ) - assert result == expected - - def test_union_safe_schema_str_with_union(self, avro_merge): - result = avro_merge.union_safe_schema_str('["null", "string"]') - expected = ( - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_0___","type": "record", "fields": [{"name": "name", ' - '"type": ["null", "string"]}]}' - ) - assert result == expected - - def test_builder_no_dependencies(self, avro_merge): - avro_merge.dependencies = None - result = avro_merge.builder(avro_merge.schema_str) - expected = ( - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_1___","type": "record", "fields": [{"name": "name", ' - '"type": [{"fields":[{"name":"field1","type":"string"}],"name":"Test","type":"record"}]}]}' - ) - assert result == expected - - def test_builder_with_dependencies(self, avro_merge): - result = avro_merge.builder(avro_merge.schema_str, avro_merge.dependencies) - expected = ( - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_1___","type": "record", "fields": [{"name": "name", "type": [{' - '"type": "string"}]}]},' - "\n" - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_2___","type": "record", "fields": [{"name": "name", "type": [{' - 
'"fields":[{"name":"field1","type":"string"}],"name":"Test","type":"record"}]}]}' - ) - assert result == expected - - def test_wrap(self, avro_merge): - result = avro_merge.wrap() - expected = ( - "[\n" - + ( - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_1___","type": "record", "fields": [{"name": "name", "type": [{' - '"type": "string"}]}]},' - "\n" - '{"name": "___RESERVED_KARAPACE_WRAPPER_NAME_2___","type": "record", "fields": [{"name": "name", "type": [{' - '"fields":[{"name":"field1","type":"string"}],"name":"Test","type":"record"}]}]}' - ) - + "\n]" - ) - assert result == expected