diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..1cb254f --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,58 @@ +# This file was automatically generated by sbt-github-actions using the +# githubWorkflowGenerate task. You should add and commit this file to +# your git repository. It goes without saying that you shouldn't edit +# this file by hand! Instead, if you wish to make changes, you should +# change your sbt build configuration to revise the workflow description +# to meet your needs, then regenerate this file. + +name: Continuous Integration + +on: + pull_request: + branches: ['*'] + push: + branches: ['*'] + +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +jobs: + build: + name: Build and Test + strategy: + matrix: + os: [ubuntu-latest] + scala: [2.12.12, 2.13.4] + java: [adopt@1.8] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout current branch (full) + uses: actions/checkout@v2 + with: + fetch-depth: 0 + submodules: recursive + + - name: Setup Java and Scala + uses: olafurpg/setup-scala@v10 + with: + java-version: ${{ matrix.java }} + + - name: Cache sbt + uses: actions/cache@v2 + with: + path: | + ~/.sbt + ~/.ivy2/cache + ~/.coursier/cache/v1 + ~/.cache/coursier/v1 + ~/AppData/Local/Coursier/Cache/v1 + ~/Library/Caches/Coursier/v1 + key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + + - name: Check that workflows are up to date + run: sbt ++${{ matrix.scala }} githubWorkflowCheck + + - name: Test + run: sbt ++${{ matrix.scala }} clean coverage scalastyle scalafmtCheckAll scalafmtSbtCheck test coverageReport + + - uses: codecov/codecov-action@v1 \ No newline at end of file diff --git a/.github/workflows/clean.yml b/.github/workflows/clean.yml new file mode 100644 index 0000000..b535fcc --- /dev/null +++ b/.github/workflows/clean.yml @@ -0,0 +1,59 @@ +# This file was automatically generated by sbt-github-actions using the +# githubWorkflowGenerate task. You should add and commit this file to +# your git repository. It goes without saying that you shouldn't edit +# this file by hand! Instead, if you wish to make changes, you should +# change your sbt build configuration to revise the workflow description +# to meet your needs, then regenerate this file. + +name: Clean + +on: push + +jobs: + delete-artifacts: + name: Delete Artifacts + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - name: Delete artifacts + run: | + # Customize those three lines with your repository and credentials: + REPO=${GITHUB_API_URL}/repos/${{ github.repository }} + + # A shortcut to call GitHub API. + ghapi() { curl --silent --location --user _:$GITHUB_TOKEN "$@"; } + + # A temporary file which receives HTTP response headers. + TMPFILE=/tmp/tmp.$$ + + # An associative array, key: artifact name, value: number of artifacts of that name. + declare -A ARTCOUNT + + # Process all artifacts on this repository, loop on returned "pages". + URL=$REPO/actions/artifacts + while [[ -n "$URL" ]]; do + + # Get current page, get response headers in a temporary file. + JSON=$(ghapi --dump-header $TMPFILE "$URL") + + # Get URL of next page. Will be empty if we are at the last page. + URL=$(grep '^Link:' "$TMPFILE" | tr ',' '\n' | grep 'rel="next"' | head -1 | sed -e 's/.*<\(.*\)>.*/\1/') + rm -f $TMPFILE + + # Number of artifacts on this page: + COUNT=$(( $(jq <<<$JSON -r '.artifacts | length') )) + + # Loop on all artifacts on this page. 
+ for ((i=0; $i < $COUNT; i++)); do + + # Get name of artifact and count instances of this name. + name=$(jq <<<$JSON -r ".artifacts[$i].name?") + ARTCOUNT[$name]=$(( $(( ${ARTCOUNT[$name]} )) + 1)) + + id=$(jq <<<$JSON -r ".artifacts[$i].id?") + size=$(( $(jq <<<$JSON -r ".artifacts[$i].size_in_bytes?") )) + printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size + ghapi -X DELETE $REPO/actions/artifacts/$id + done + done \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fb70982 --- /dev/null +++ b/.gitignore @@ -0,0 +1,11 @@ +*.iml +target/ +.idea/ +.idea_modules/ +.DS_STORE +.cache +.settings +.project +.classpath +.bsp/ +tmp/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..0e295d0 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "tests"] + path = tests + url = git@github.com:json-schema-org/JSON-Schema-Test-Suite.git diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 0000000..b8c707b --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,15 @@ +version = 2.6.4 +continuationIndent.defnSite = 2 +docstrings = JavaDoc +includeCurlyBraceInSelectChains = false +maxColumn = 120 +newlines.alwaysBeforeElseAfterCurlyIf = false +newlines.alwaysBeforeMultilineDef = false +optIn.breakChainOnFirstMethodDot = false +rewrite.rules = [ + AvoidInfix, + RedundantParens, + AsciiSortImports, + PreferCurlyFors +] +newlines.afterCurlyLambda = preserve diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..72e6f55 --- /dev/null +++ b/README.md @@ -0,0 +1,36 @@ +# JSON Schemas for Circe + +[![Build status](https://img.shields.io/github/workflow/status/circe/circe-schema/Continuous%20Integration.svg)](https://github.com/circe/circe-schema) +[![Gitter](https://img.shields.io/badge/gitter-join%20chat-green.svg)](https://gitter.im/circe/) +[![Maven Central](https://img.shields.io/maven-central/v/io.circe/circe-schema_2.13.svg)](https://maven-badges.herokuapp.com/maven-central/io.circe/circe-schema_2.13) + +This library provides a [JSON Schema][json-schema] implementation for [Circe][circe]. It is similar in +purpose to [circe-json-schema], but unlike that project it is not just a wrapper for a Java library. + +It is currently a work in progress, and all APIs are subject to change. + +## Community + +This project supports the [Scala code of conduct][code-of-conduct] and wants all of its channels +(Gitter, GitHub, etc.) 
to be inclusive environments. + +## Copyright and license + +circe-schema is licensed under the **[Apache License, Version 2.0][apache]** +(the "License"); you may not use this software except in compliance with the +License. + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Copyright [Travis Brown][travisbrown], 2021. + +[apache]: http://www.apache.org/licenses/LICENSE-2.0 +[circe]: https://github.com/circe/circe +[circe-json-schema]: https://github.com/circe/circe-json-schema +[code-of-conduct]: https://www.scala-lang.org/conduct/ +[json-schema]: https://json-schema.org/ +[travisbrown]: https://twitter.com/travisbrown diff --git a/build.sbt b/build.sbt new file mode 100644 index 0000000..5e5d1c6 --- /dev/null +++ b/build.sbt @@ -0,0 +1,88 @@ +organization in ThisBuild := "io.circe" + +crossScalaVersions in ThisBuild := Seq("2.12.12", "2.13.4") +scalaVersion in ThisBuild := crossScalaVersions.value.last + +githubWorkflowJavaVersions in ThisBuild := Seq("adopt@1.8") +githubWorkflowPublishTargetBranches in ThisBuild := Nil +githubWorkflowJobSetup in ThisBuild := { + githubWorkflowJobSetup.in(ThisBuild).value.toList.map { + case step @ WorkflowStep.Use("actions", "checkout", "v2", _, _, _, _, _) => + step.copy(params = step.params.updated("submodules", "recursive")) + case other => other + } +} +githubWorkflowBuild in ThisBuild := Seq( + WorkflowStep.Sbt( + List( + "clean", + "coverage", + "scalastyle", + "scalafmtCheckAll", + "scalafmtSbtCheck", + "test", + "coverageReport" + ), + id = None, + name = Some("Test") + ), + WorkflowStep.Use( + "codecov", + "codecov-action", + "v1" + ) +) + +val catsVersion = "2.3.0" +val circeVersion = "0.14.0-M3" +val scalaMetaVersion = "4.4.2" + +val commonSettings = Seq( + libraryDependencies ++= Seq( + "io.circe" %% "circe-jawn" % circeVersion % Test, + "io.circe" %% "circe-generic" % circeVersion % Test, + "io.circe" %% "circe-literal" % circeVersion % Test, + "org.scalameta" %% "munit-scalacheck" % "0.7.20" % Test + ), + scalacOptions ++= Seq("-target:jvm-1.8", "-Ywarn-unused:imports") ++ { + CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, n)) if n >= 13 => Seq("-Ymacro-annotations") + case _ => Seq("-Ypartial-unification") + } + }, + testFrameworks += new TestFramework("munit.Framework"), + addCompilerPlugin(("org.typelevel" %% "kind-projector" % "0.11.2").cross(CrossVersion.full)) +) + +lazy val root = + project.in(file(".")).settings(commonSettings).aggregate(schema, gen, validation).dependsOn(schema, gen, validation) + +lazy val schema = project + .settings(moduleName := "circe-schema") + .settings(commonSettings) + .settings( + libraryDependencies ++= Seq( + "io.circe" %% "circe-core" % circeVersion, + "io.circe" %% "circe-pointer" % circeVersion, + "org.typelevel" %% "cats-core" % catsVersion + ) + ) + +lazy val gen = project + .settings(moduleName := "circe-schema-gen") + .settings(commonSettings) + .settings( + libraryDependencies ++= Seq( + "org.scalameta" %% "scalameta" % scalaMetaVersion + ) + ) + .dependsOn(schema) + +lazy val validation = + project + .settings( + moduleName := "circe-schema-validation", + libraryDependencies ++= Seq("io.circe" %% "circe-parser" % circeVersion) + ) + .settings(commonSettings) + .dependsOn(schema) diff --git 
a/project/build.properties b/project/build.properties new file mode 100644 index 0000000..d91c272 --- /dev/null +++ b/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.4.6 diff --git a/project/plugins.sbt b/project/plugins.sbt new file mode 100644 index 0000000..8fe1cd8 --- /dev/null +++ b/project/plugins.sbt @@ -0,0 +1,11 @@ +addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.9.5") +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.4.6") +addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") +addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") diff --git a/scalastyle-config.xml b/scalastyle-config.xml new file mode 100644 index 0000000..e628f69 --- /dev/null +++ b/scalastyle-config.xml @@ -0,0 +1,85 @@ + + Circe Configuration + + + FOR + IF + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + all + .+ + + + diff --git a/schema/src/main/scala/io/circe/schema/Constraint.scala b/schema/src/main/scala/io/circe/schema/Constraint.scala new file mode 100644 index 0000000..1b2141e --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Constraint.scala @@ -0,0 +1,300 @@ +package io.circe.schema + +import cats.{Applicative, Traverse} +import cats.data.Validated +import cats.kernel.Monoid +import cats.syntax.apply._ +import cats.syntax.foldable._ +import cats.syntax.functor._ +import cats.syntax.traverse._ +import io.circe.{Decoder, DecodingFailure, HCursor, Json} +import io.circe.schema.{Format => SchemaFormat} +import scala.collection.immutable.ListMap + +sealed trait Constraint[+S] { + def compatible: Set[Type] +} + +object Constraint { + implicit val traverseConstraint: Traverse[Constraint] = new DefaultFoldable[Constraint] with Traverse[Constraint] { + def traverse[G[_], A, B](fa: Constraint[A])(f: A => G[B])(implicit G: Applicative[G]): G[Constraint[B]] = + fa match { + case AllOf(schemas) => schemas.traverse(f).map(AllOf(_)) + case AnyOf(schemas) => schemas.traverse(f).map(AnyOf(_)) + case OneOf(schemas) => schemas.traverse(f).map(OneOf(_)) + case Not(schema) => f(schema).map(Not(_)) + case Contains(schema) => f(schema).map(Contains(_)) + case ItemsType(schema) => f(schema).map(ItemsType(_)) + case ItemsTuple(schemas) => schemas.traverse(f).map(ItemsTuple(_)) + case AdditionalItems(schema, startIndex) => f(schema).map(AdditionalItems(_, startIndex)) + case Properties(values) => values.traverse(_.traverse(f)).map(Properties(_)) + case PropertyNames(schema) => f(schema).map(PropertyNames(_)) + case AdditionalProperties(schema, knownNames, patterns) => + f(schema).map(AdditionalProperties(_, knownNames, patterns)) + case Dependencies(values) => values.traverse(_.traverse(f)).map(Dependencies(_)) + case PatternProperties(values) => + values.traverse(pair => f(pair._2).map((pair._1, _))).map(PatternProperties(_)) + case Conditional(ifSchema, thenSchema, elseSchema) => + (f(ifSchema), thenSchema.traverse(f), elseSchema.traverse(f)).mapN(Conditional(_, _, _)) + + case c @ Types(_) => G.pure(c) + case c @ Enumeration(_) => G.pure(c) + case c @ Minimum(_) => G.pure(c) + case c 
@ Maximum(_) => G.pure(c) + case c @ ExclusiveMinimum(_) => G.pure(c) + case c @ ExclusiveMaximum(_) => G.pure(c) + case c @ MultipleOf(_) => G.pure(c) + case c @ Pattern(_) => G.pure(c) + case c @ Format(_) => G.pure(c) + case c @ ContentMediaType(_, _) => G.pure(c) + case c @ ContentEncoding(_) => G.pure(c) + case c @ MinLength(_) => G.pure(c) + case c @ MaxLength(_) => G.pure(c) + case c @ UniqueItems(_) => G.pure(c) + case c @ MinItems(_) => G.pure(c) + case c @ MaxItems(_) => G.pure(c) + case c @ Required(_) => G.pure(c) + case c @ MinProperties(_) => G.pure(c) + case c @ MaxProperties(_) => G.pure(c) + } + + override def foldMap[A, B](fa: Constraint[A])(f: A => B)(implicit B: Monoid[B]): B = + fa match { + case AllOf(schemas) => schemas.foldMap(f) + case AnyOf(schemas) => schemas.foldMap(f) + case OneOf(schemas) => schemas.foldMap(f) + case Not(schema) => f(schema) + case Contains(schema) => f(schema) + case ItemsType(schema) => f(schema) + case ItemsTuple(schemas) => schemas.foldMap(f) + case AdditionalItems(schema, startIndex) => f(schema) + case Properties(values) => values.foldMap(_.foldMap(f)) + case PropertyNames(schema) => f(schema) + case AdditionalProperties(schema, knownNames, patterns) => f(schema) + case Dependencies(values) => values.foldMap(_.foldMap(f)) + case PatternProperties(values) => values.foldMap(pair => f(pair._2)) + case Conditional(ifSchema, thenSchema, elseSchema) => + B.combine(f(ifSchema), B.combine(thenSchema.foldMap(f), elseSchema.foldMap(f))) + case _ => B.empty + } + } + + private val allTypes: Set[Type] = + Set(Type.Array, Type.Boolean, Type.Integer, Type.Null, Type.Number, Type.Object, Type.String) + private val numericTypes: Set[Type] = Set(Type.Integer, Type.Number) + private val stringTypes: Set[Type] = Set(Type.String) + private val arrayTypes: Set[Type] = Set(Type.Array) + private val objectTypes: Set[Type] = Set(Type.Object) + + sealed abstract class AnyTypeConstraint[+S] extends Constraint[S] { + final def compatible: Set[Type] = allTypes + } + + sealed abstract class NumericConstraint extends Constraint[Nothing] { + def value: BigDecimal + final def compatible: Set[Type] = numericTypes + } + + sealed abstract class StringConstraint extends Constraint[Nothing] { + final def compatible: Set[Type] = stringTypes + } + + sealed abstract class ArrayConstraint[+S] extends Constraint[S] { + final def compatible: Set[Type] = arrayTypes + } + + sealed abstract class ObjectConstraint[+S] extends Constraint[S] { + final def compatible: Set[Type] = objectTypes + } + + case class Types(values: Vector[Type]) extends Constraint[Nothing] { + val compatible: Set[Type] = values.toSet + } + + case class Enumeration(values: Vector[Json]) extends AnyTypeConstraint[Nothing] + case class AllOf[+S](schemas: Vector[S]) extends AnyTypeConstraint[S] + case class AnyOf[+S](schemas: Vector[S]) extends AnyTypeConstraint[S] + case class OneOf[+S](schemas: Vector[S]) extends AnyTypeConstraint[S] + case class Not[+S](schema: S) extends AnyTypeConstraint[S] + + case class Minimum(value: BigDecimal) extends NumericConstraint + case class Maximum(value: BigDecimal) extends NumericConstraint + case class ExclusiveMinimum(value: BigDecimal) extends NumericConstraint + case class ExclusiveMaximum(value: BigDecimal) extends NumericConstraint + case class MultipleOf(value: BigDecimal) extends NumericConstraint + + case class Pattern(regex: String) extends StringConstraint + case class Format(format: SchemaFormat) extends StringConstraint + case class ContentMediaType(value: String, 
encoding: Option[Encoding]) extends StringConstraint + case class ContentEncoding(value: Encoding) extends StringConstraint + case class MinLength(value: Int) extends StringConstraint + case class MaxLength(value: Int) extends StringConstraint + + case class UniqueItems(value: Boolean) extends ArrayConstraint[Nothing] + case class MinItems(value: Int) extends ArrayConstraint[Nothing] + case class MaxItems(value: Int) extends ArrayConstraint[Nothing] + case class Contains[+S](schema: S) extends ArrayConstraint[S] + case class ItemsType[+S](schema: S) extends ArrayConstraint[S] + case class ItemsTuple[+S](schemas: Vector[S]) extends ArrayConstraint[S] + case class AdditionalItems[+S](schema: S, startIndex: Int) extends ArrayConstraint[S] + + case class Properties[+S](values: Vector[Property[S]]) extends ObjectConstraint[S] + case class Required(names: Vector[String]) extends ObjectConstraint[Nothing] + case class PropertyNames[+S](schema: S) extends ObjectConstraint[S] + case class MinProperties(value: Int) extends ObjectConstraint[Nothing] + case class MaxProperties(value: Int) extends ObjectConstraint[Nothing] + case class AdditionalProperties[S](schema: S, knownNames: Set[String], patterns: Set[String]) + extends ObjectConstraint[S] + case class Dependencies[+S](values: Vector[Dependency[S]]) extends ObjectConstraint[S] + case class PatternProperties[+S](values: Vector[(String, S)]) extends ObjectConstraint[S] + + case class Conditional[+S](ifSchema: S, thenSchema: Option[S], elseSchema: Option[S]) extends AnyTypeConstraint[S] + + private def propertyFromPair[S](p: (String, S)): Property[S] = Property(p._1, p._2) + private lazy val decodeSchemaVector: Decoder[Vector[Schema.Unresolved]] = Decoder.decodeVector[Schema.Unresolved] + private lazy val decodeSchemaListMap: Decoder[ListMap[String, Schema.Unresolved]] = + Decoder[ListMap[String, Schema.Unresolved]] + + private val decodeTypeSingle: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Type].map(value => Types(Vector(value))) + private val decodeTypeArray: Decoder[Constraint[Schema.Unresolved]] = Decoder[Vector[Type]].map(Types(_)) + private val decodeType: Decoder[Constraint[Schema.Unresolved]] = decodeTypeSingle.or(decodeTypeArray) + private val decodeEnum: Decoder[Constraint[Schema.Unresolved]] = Decoder[Vector[Json]].map(Enumeration(_)) + private val decodeConst: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Json].map(value => Enumeration(Vector(value))) + + private lazy val decodeAllOf: Decoder[Constraint[Schema.Unresolved]] = decodeSchemaVector.map(AllOf(_)) + private lazy val decodeAnyOf: Decoder[Constraint[Schema.Unresolved]] = decodeSchemaVector.map(AnyOf(_)) + private lazy val decodeOneOf: Decoder[Constraint[Schema.Unresolved]] = decodeSchemaVector.map(OneOf(_)) + private lazy val decodeNot: Decoder[Constraint[Schema.Unresolved]] = Decoder[Schema.Unresolved].map(Not(_)) + + private val decodeMinimum: Decoder[Constraint[Schema.Unresolved]] = Decoder[BigDecimal].map(Minimum(_)) + private val decodeMaximum: Decoder[Constraint[Schema.Unresolved]] = Decoder[BigDecimal].map(Maximum(_)) + private val decodeExclusiveMinimum: Decoder[Constraint[Schema.Unresolved]] = + Decoder[BigDecimal].map(ExclusiveMinimum(_)) + private val decodeExclusiveMaximum: Decoder[Constraint[Schema.Unresolved]] = + Decoder[BigDecimal].map(ExclusiveMaximum(_)) + private val decodeMultipleOf: Decoder[Constraint[Schema.Unresolved]] = Decoder[BigDecimal].map(MultipleOf(_)) + + private val decodePattern: Decoder[Constraint[Schema.Unresolved]] = 
Decoder[String].map(Pattern(_)) + private val decodeFormat: Decoder[Constraint[Schema.Unresolved]] = Decoder[SchemaFormat].map(Format(_)) + private val decodeContentMediaType: Decoder[Constraint[Schema.Unresolved]] = + Decoder[String].map(ContentMediaType(_, None)) + private val decodeContentEncoding: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Encoding].map(ContentEncoding(_)) + private val decodeMinLength: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MinLength(_)) + private val decodeMaxLength: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MaxLength(_)) + + private val decodeUniqueItems: Decoder[Constraint[Schema.Unresolved]] = Decoder[Boolean].map(UniqueItems(_)) + private val decodeMinItems: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MinItems(_)) + private val decodeMaxItems: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MaxItems(_)) + private lazy val decodeContains: Decoder[Constraint[Schema.Unresolved]] = Decoder[Schema.Unresolved].map(Contains(_)) + private lazy val decodeItemsType: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Schema.Unresolved].map(ItemsType(_)) + private lazy val decodeItemsTuple: Decoder[Constraint[Schema.Unresolved]] = decodeSchemaVector.map(ItemsTuple(_)) + private lazy val decodeItems: Decoder[Constraint[Schema.Unresolved]] = decodeItemsType.or(decodeItemsTuple) + // We initialize `startIndex` to `Int.MaxValue`, since it depends on the `items` field. + private lazy val decodeAdditionalItems: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Schema.Unresolved].map(AdditionalItems(_, Int.MaxValue)) + + private lazy val decodeProperties: Decoder[Constraint[Schema.Unresolved]] = + decodeSchemaListMap.map(pairs => Properties(pairs.map(propertyFromPair).toVector)) + private val decodeRequired: Decoder[Constraint[Schema.Unresolved]] = Decoder[Vector[String]].map(Required(_)) + private lazy val decodePropertyNames: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Schema.Unresolved].map(PropertyNames(_)) + private val decodeMinProperties: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MinProperties(_)) + private val decodeMaxProperties: Decoder[Constraint[Schema.Unresolved]] = Decoder[Int].map(MaxProperties(_)) + private lazy val decodeAdditionalProperties: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Schema.Unresolved].map(AdditionalProperties(_, Set.empty, Set.empty)) + private lazy val decodePatternProperties: Decoder[Constraint[Schema.Unresolved]] = + decodeSchemaListMap.map(pairs => PatternProperties(pairs.toVector)) + private lazy val decodeDependencies: Decoder[Constraint[Schema.Unresolved]] = + Dependency.decodeDependencies.map(Dependencies(_)) + private lazy val decodeConditional: Decoder[Constraint[Schema.Unresolved]] = + Decoder[Schema.Unresolved].map(Conditional(_, None, None)) + + private lazy val decodeThenSchema: Decoder[Option[Schema.Unresolved]] = Decoder[Option[Schema.Unresolved]].at("then") + private lazy val decodeElseSchema: Decoder[Option[Schema.Unresolved]] = Decoder[Option[Schema.Unresolved]].at("else") + + val decodeConstraints: Decoder[Vector[Constraint[Schema.Unresolved]]] = + new Decoder[Vector[Constraint[Schema.Unresolved]]] { + private[this] lazy val decoders: Map[String, Decoder[Constraint[Schema.Unresolved]]] = Map( + "type" -> decodeType, + "enum" -> decodeEnum, + "allOf" -> decodeAllOf, + "anyOf" -> decodeAnyOf, + "oneOf" -> decodeOneOf, + "not" -> decodeNot, + "const" -> decodeConst, + "minimum" -> decodeMinimum, + "maximum" -> decodeMaximum, + 
"exclusiveMinimum" -> decodeExclusiveMinimum, + "exclusiveMaximum" -> decodeExclusiveMaximum, + "multipleOf" -> decodeMultipleOf, + "pattern" -> decodePattern, + "format" -> decodeFormat, + "contentMediaType" -> decodeContentMediaType, + "contentEncoding" -> decodeContentEncoding, + "minLength" -> decodeMinLength, + "maxLength" -> decodeMaxLength, + "uniqueItems" -> decodeUniqueItems, + "minItems" -> decodeMinItems, + "maxItems" -> decodeMaxItems, + "contains" -> decodeContains, + "items" -> decodeItems, + "additionalItems" -> decodeAdditionalItems, + "properties" -> decodeProperties, + "required" -> decodeRequired, + "propertyNames" -> decodePropertyNames, + "minProperties" -> decodeMinProperties, + "maxProperties" -> decodeMaxProperties, + "additionalProperties" -> decodeAdditionalProperties, + "patternProperties" -> decodePatternProperties, + "dependencies" -> decodeDependencies, + "if" -> decodeConditional + ) + + private def knownKeys(keys: Iterable[String]): Vector[String] = keys.filter(decoders.contains).toVector + + def apply(c: HCursor): Decoder.Result[Vector[Constraint[Schema.Unresolved]]] = c.keys match { + case None => Left(DecodingFailure("Constraints", c.history)) + case Some(keys) => knownKeys(keys).traverse(key => c.get(key)(decoders(key))) + } + + override def decodeAccumulating(c: HCursor): Decoder.AccumulatingResult[Vector[Constraint[Schema.Unresolved]]] = + c.keys match { + case None => Validated.invalidNel(DecodingFailure("Constraints", c.history)) + case Some(keys) => + keys + .filter(decoders.contains) + .toVector + .traverse(key => decoders(key).tryDecodeAccumulating(c.downField(key))) + } + }.flatMap { constraints => + val maybeIfSchema = constraints.collectFirst { + case Conditional(ifSchema, None, None) => ifSchema + } + + maybeIfSchema match { + case Some(ifSchema) => + decodeThenSchema.product(decodeElseSchema).map { + case (thenSchema, elseSchema) => + constraints :+ Conditional(ifSchema, thenSchema, elseSchema) + } + case None => Decoder.const(constraints) + } + } + + // This thing is generic but I don't know of a good place to get it. 
+ private abstract class DefaultFoldable[F[_]] { self: Traverse[F] => + import cats.Eval + import cats.arrow.Category + import cats.data.Chain + + final def foldRight[A, B](fa: F[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = + foldMap[A, Eval[B] => Eval[B]](fa)(a => lbb => Eval.defer(f(a, lbb)))(Category[Function1].algebra)(lb) + + final def foldLeft[A, B](fa: F[A], b: B)(f: (B, A) => B): B = + foldMap[A, Chain[A]](fa)(Chain.one).foldLeft(b)(f) + } +} diff --git a/schema/src/main/scala/io/circe/schema/Dependency.scala b/schema/src/main/scala/io/circe/schema/Dependency.scala new file mode 100644 index 0000000..7a5c281 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Dependency.scala @@ -0,0 +1,59 @@ +package io.circe.schema + +import cats.{Applicative, Eval, FlatMap, Traverse} +import io.circe.Decoder +import scala.annotation.tailrec +import scala.collection.immutable.ListMap + +sealed trait Dependency[+S] { + def name: String +} + +object Dependency { + import io.circe.schema.{Schema => CirceSchema} + + case class Property(name: String, other: Vector[String]) extends Dependency[Nothing] + case class Schema[+S](name: String, schema: S) extends Dependency[S] + + implicit val traverseInstance: Traverse[Dependency] with FlatMap[Dependency] = new Traverse[Dependency] + with FlatMap[Dependency] { + def traverse[G[_], A, B](fa: Dependency[A])(f: A => G[B])(implicit G: Applicative[G]): G[Dependency[B]] = + fa match { + case p @ Property(_, _) => G.pure(p) + case Schema(name, schema) => G.map(f(schema))(Schema(name, _)) + } + + def foldLeft[A, B](fa: Dependency[A], b: B)(f: (B, A) => B): B = fa match { + case p @ Property(_, _) => b + case Schema(_, schema) => f(b, schema) + } + + def foldRight[A, B](fa: Dependency[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = fa match { + case p @ Property(_, _) => lb + case Schema(_, schema) => f(schema, lb) + } + + def flatMap[A, B](fa: Dependency[A])(f: A => Dependency[B]): Dependency[B] = fa match { + case p @ Property(_, _) => p + case Schema(_, schema) => f(schema) + } + + @tailrec + def tailRecM[A, B](a: A)(f: A => Dependency[Either[A, B]]): Dependency[B] = f(a) match { + case p @ Property(_, _) => p + case Schema(name, Right(b)) => Schema(name, b) + case Schema(_, Left(a)) => tailRecM(a)(f) + } + } + + private implicit lazy val decodeValue: Decoder[Either[Vector[String], CirceSchema.Unresolved]] = + Decoder[Vector[String]].either(Decoder[CirceSchema.Unresolved]) + + private[schema] lazy val decodeDependencies: Decoder[Vector[Dependency[CirceSchema.Unresolved]]] = + Decoder[ListMap[String, Either[Vector[String], CirceSchema.Unresolved]]].map { pairs => + pairs.map { + case (name, Right(schema)) => Schema(name, schema) + case (name, Left(other)) => Property(name, other) + }.toVector + } +} diff --git a/schema/src/main/scala/io/circe/schema/Encoding.scala b/schema/src/main/scala/io/circe/schema/Encoding.scala new file mode 100644 index 0000000..ece6e63 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Encoding.scala @@ -0,0 +1,36 @@ +package io.circe.schema + +import io.circe.{Decoder, Encoder} + +sealed trait Encoding { + def name: String +} + +object Encoding { + case object SevenBit extends Encoding { + def name: String = "7bit" + } + + case object EightBit extends Encoding { + def name: String = "8bit" + } + + case object Binary extends Encoding { + def name: String = "binary" + } + + case object QuotedPrintable extends Encoding { + def name: String = "quoted-printable" + } + + case object Base64 extends Encoding { + 
def name: String = "base64" + } + + private val byName: Map[String, Encoding] = + List(SevenBit, EightBit, Binary, QuotedPrintable, Base64).map(tpe => (tpe.name, tpe)).toMap + + implicit val decodeEncoding: Decoder[Encoding] = + Decoder[String].emap(value => byName.get(value).toRight("Encoding")) + implicit val encodeEncoding: Encoder[Encoding] = Encoder[String].contramap(_.name) +} diff --git a/schema/src/main/scala/io/circe/schema/Format.scala b/schema/src/main/scala/io/circe/schema/Format.scala new file mode 100644 index 0000000..41634b1 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Format.scala @@ -0,0 +1,119 @@ +package io.circe.schema + +import io.circe.{Decoder, Encoder} + +sealed trait Format { + def name: String + def since: Version +} + +object Format { + case object DateTime extends Format { + def name: String = "date-time" + def since: Version = Version.Draft4 + } + + case object Date extends Format { + def name: String = "date" + def since: Version = Version.Draft7 + } + + case object Time extends Format { + def name: String = "time" + def since: Version = Version.Draft7 + } + + case object Email extends Format { + def name: String = "email" + def since: Version = Version.Draft4 + } + + case object EmailIdn extends Format { + def name: String = "idn-email" + def since: Version = Version.Draft7 + } + + case object Hostname extends Format { + def name: String = "hostname" + def since: Version = Version.Draft4 + } + + case object HostnameIdn extends Format { + def name: String = "idn-hostname" + def since: Version = Version.Draft7 + } + + case object Ipv4 extends Format { + def name: String = "ipv4" + def since: Version = Version.Draft4 + } + + case object Ipv6 extends Format { + def name: String = "ipv6" + def since: Version = Version.Draft4 + } + + case object Uri extends Format { + def name: String = "uri" + def since: Version = Version.Draft4 + } + + case object UriReference extends Format { + def name: String = "uri-reference" + def since: Version = Version.Draft6 + } + + case object Iri extends Format { + def name: String = "iri" + def since: Version = Version.Draft7 + } + + case object IriReference extends Format { + def name: String = "iri-reference" + def since: Version = Version.Draft7 + } + + case object UriTemplate extends Format { + def name: String = "uri-template" + def since: Version = Version.Draft6 + } + + case object JsonPointer extends Format { + def name: String = "json-pointer" + def since: Version = Version.Draft6 + } + + case object RelativeJsonPointer extends Format { + def name: String = "relative-json-pointer" + def since: Version = Version.Draft7 + } + + case object Regex extends Format { + def name: String = "regex" + def since: Version = Version.Draft7 + } + + private val byName: Map[String, Format] = + List( + DateTime, + Date, + Time, + Email, + EmailIdn, + Hostname, + HostnameIdn, + Ipv4, + Ipv6, + Uri, + UriReference, + Iri, + IriReference, + UriTemplate, + JsonPointer, + RelativeJsonPointer, + Regex + ).map(tpe => (tpe.name, tpe)).toMap + + implicit val decodeFormat: Decoder[Format] = Decoder[String].emap(value => byName.get(value).toRight("Format")) + implicit val encodeFormat: Encoder[Format] = Encoder[String].contramap(_.name) +} diff --git a/schema/src/main/scala/io/circe/schema/Metadata.scala b/schema/src/main/scala/io/circe/schema/Metadata.scala new file mode 100644 index 0000000..7fc5ed1 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Metadata.scala @@ -0,0 +1,39 @@ +package io.circe.schema + +import 
cats.syntax.apply._ +import io.circe.{Decoder, Json, JsonObject} + +case class Metadata( + id: Option[String], + version: Option[Version], + title: Option[String], + description: Option[String], + default: Option[Json], + examples: Option[List[Json]] +) + +object Metadata { + val empty: Metadata = Metadata(None, None, None, None, None, None) + + private val idDecoderDraft4: Decoder[Option[String]] = Decoder[Option[String]].at("id") + private val idDecoderOther: Decoder[Option[String]] = Decoder[Option[String]].at("$id") + + implicit val decodeMetadata: Decoder[Metadata] = ( + Decoder[JsonObject], + Decoder[Option[Version]].at("$schema"), + Decoder[Option[String]].at("title"), + Decoder[Option[String]].at("description"), + Decoder[Option[Json]].at("default"), + Decoder[Option[List[Json]]].at("examples") + ).mapN((_, version, title, description, default, examples) => + Metadata(None, version, title, description, default, examples) + ).flatMap { metadata => + val idDecoder = if (metadata.version.contains(Version.Draft4)) { + idDecoderDraft4 + } else { + idDecoderOther + } + + idDecoder.map(_.fold(metadata)(id => metadata.copy(id = Some(id)))) + } +} diff --git a/schema/src/main/scala/io/circe/schema/Property.scala b/schema/src/main/scala/io/circe/schema/Property.scala new file mode 100644 index 0000000..994b36c --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Property.scala @@ -0,0 +1,23 @@ +package io.circe.schema + +import cats.{Applicative, Eval, FlatMap, Traverse} +import scala.annotation.tailrec + +case class Property[+S](name: String, schema: S) + +object Property { + implicit val traverseProperty: Traverse[Property] with FlatMap[Property] = new Traverse[Property] + with FlatMap[Property] { + def traverse[G[_], A, B](fa: Property[A])(f: A => G[B])(implicit G: Applicative[G]): G[Property[B]] = + G.map(f(fa.schema))(Property(fa.name, _)) + def foldLeft[A, B](fa: Property[A], b: B)(f: (B, A) => B): B = f(b, fa.schema) + def foldRight[A, B](fa: Property[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = f(fa.schema, lb) + def flatMap[A, B](fa: Property[A])(f: A => Property[B]): Property[B] = f(fa.schema) + + @tailrec + def tailRecM[A, B](a: A)(f: A => Property[Either[A, B]]): Property[B] = f(a) match { + case Property(name, Right(b)) => Property(name, b) + case Property(_, Left(a)) => tailRecM(a)(f) + } + } +} diff --git a/schema/src/main/scala/io/circe/schema/Resolver.scala b/schema/src/main/scala/io/circe/schema/Resolver.scala new file mode 100644 index 0000000..221ca34 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Resolver.scala @@ -0,0 +1,99 @@ +package io.circe.schema + +import cats.{Applicative, ApplicativeError, Eval, Monad} +import cats.data.StateT +import cats.syntax.functor._ +import cats.syntax.traverse._ +import io.circe.{Decoder, DecodingFailure, HCursor} +import io.circe.pointer.{Pointer, PointerFailure, PointerSyntaxError} +import java.net.URI + +abstract class Resolver[F[_], R](implicit private val F: Applicative[F]) + extends (Schema[R] => F[Eval[Schema[Nothing]]]) { + protected[this] def resolve(cursor: HCursor, uri: R): F[Eval[Schema.Resolved]] + + final def apply(schema: Schema[R]): F[Eval[Schema[Nothing]]] = schema match { + case Schema.RejectAll => F.pure(Eval.now(Schema.RejectAll)) + case Schema.AcceptAll => F.pure(Eval.now(Schema.AcceptAll)) + case Schema.Ref(cursor, value) => F.map(resolve(cursor, value))(es => es.map(_ => Schema.Link(es))) + case Schema.Link(schema) => F.pure(schema) + case Schema.Constraints(metadata, constraints) => + 
F.map(constraints.traverse(_.traverse(apply(_))))((cs: Vector[Constraint[Eval[Schema.Resolved]]]) => + cs.traverse(_.sequence).map(Schema.Constraints(metadata, _)) + ) + } +} + +abstract class CachingUriResolver[F[_]](implicit F: Monad[F]) extends Resolver[StateT[F, Resolver.Cache, *], URI] {} + +object Resolver { + type Cache = Map[Pointer, Eval[Schema.Resolved]] + type Result[A] = Either[Failure, A] + type StateResult[A] = StateT[Result, Cache, A] + + sealed abstract class Failure extends Exception + + object Failure { + case class Unresolvable(uri: URI) extends Failure + case class InvalidPointerSyntax(error: PointerSyntaxError) extends Failure + case class InvalidLocation(failure: PointerFailure) extends Failure + case class InvalidSchema(failure: DecodingFailure) extends Failure + + def unresolvable[F[_], A](uri: URI)(implicit F: ApplicativeError[F, Failure]): F[A] = + F.raiseError(Unresolvable(uri)) + def invalidPointerSyntax[F[_], A](error: PointerSyntaxError)(implicit F: ApplicativeError[F, Failure]): F[A] = + F.raiseError(InvalidPointerSyntax(error)) + def invalidLocation[F[_], A](failure: PointerFailure)(implicit F: ApplicativeError[F, Failure]): F[A] = + F.raiseError(InvalidLocation(failure)) + def invalidSchema[F[_], A](failure: DecodingFailure)(implicit F: ApplicativeError[F, Failure]): F[A] = + F.raiseError(InvalidSchema(failure)) + } + + private def link(schema: Schema.Resolved): Eval[Schema.Resolved] = schema match { + case Schema.RejectAll => Eval.now(Schema.RejectAll) + case Schema.AcceptAll => Eval.now(Schema.AcceptAll) + case Schema.Ref(_, value) => value + case s @ Schema.Link(schema) => schema + case s @ Schema.Constraints(metadata, constraints) => Eval.now(s) + } + + // A simple resolver that only works with pointers into the current document. 
+ val local: Resolver[StateResult, URI] = new CachingUriResolver[Result] { + def resolve(cursor: HCursor, uri: URI): StateResult[Eval[Schema.Resolved]] = { + if (uri.getScheme.eq(null) && uri.getSchemeSpecificPart.isEmpty) { + val fragment = Option(uri.getFragment).getOrElse("") + + Pointer.parse(fragment) match { + case Right(pointer) => + StateT.inspect[Result, Cache, Option[Eval[Schema.Resolved]]](_.get(pointer)).flatMap { + case Some(cached) => StateT.pure(cached) + case None => + val cc = pointer(cursor) + + StateT + .liftF[Result, Cache, HCursor]( + cc.success.toRight(Failure.InvalidLocation(PointerFailure(cc.history))) + ) + .flatMap { hc => + Decoder[Schema[URI]].apply(hc) match { + case Right(schema) => + //this(schema).flatMap { + // case Schema.RejectAll => F.pure(Schema.RejectAll) + // case Schema.AcceptAll => F.pure(Schema.AcceptAll) + + this(schema).flatMap { resolved => + StateT.modify[Result, Cache](_.updated(pointer, resolved)).as(resolved) + } + case Left(failure) => Failure.invalidSchema[StateResult, Eval[Schema.Resolved]](failure) + } + } + } + case Left(error) => Failure.invalidPointerSyntax[StateResult, Eval[Schema.Resolved]](error) + } + + } else { + Failure.unresolvable[StateResult, Eval[Schema.Resolved]](uri) + } + } + } +} diff --git a/schema/src/main/scala/io/circe/schema/Schema.scala b/schema/src/main/scala/io/circe/schema/Schema.scala new file mode 100644 index 0000000..0e02801 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Schema.scala @@ -0,0 +1,139 @@ +package io.circe.schema + +import cats.Eval +import cats.kernel.Monoid +import io.circe.{Decoder, HCursor} +import java.net.{URI, URISyntaxException} + +sealed trait Schema[+R] { + def metadata: Metadata + def references: Vector[R] + def unresolved: Boolean +} + +object Schema { + type Unresolved = Schema[URI] + type Resolved = Schema[Nothing] + + case object RejectAll extends Schema[Nothing] { + def metadata: Metadata = Metadata.empty + def references: Vector[Nothing] = Vector.empty + def unresolved: Boolean = false + } + + case object AcceptAll extends Schema[Nothing] { + def metadata: Metadata = Metadata.empty + def references: Vector[Nothing] = Vector.empty + def unresolved: Boolean = false + } + + case class Ref[+R](cursor: HCursor, value: R) extends Schema[R] { + def metadata: Metadata = Metadata.empty + def references: Vector[R] = Vector(value) + def unresolved: Boolean = true + } + + case class Link(schema: Eval[Resolved]) extends Schema[Nothing] { + def metadata: Metadata = Metadata.empty + def references: Vector[Nothing] = Vector.empty + def unresolved: Boolean = false + } + + private[this] val disjunctionMonoid: Monoid[Boolean] = Monoid.instance(false, _ || _) + + case class Constraints[R](metadata: Metadata, constraints: Vector[Constraint[Schema[R]]]) extends Schema[R] { + import cats.syntax.foldable._ + + def references: Vector[R] = constraints.foldMap(_.foldMap(_.references)) + def unresolved: Boolean = constraints.foldMap(_.foldMap(_.unresolved)(disjunctionMonoid))(disjunctionMonoid) + } + + private def postProcessConstraints[R](constraints: Vector[Constraint[Schema[R]]]): Vector[Constraint[Schema[R]]] = { + val encoding: Option[Encoding] = constraints.collectFirst { + case Constraint.ContentEncoding(value) => value + } + + val newConstraints = encoding match { + case Some(encoding) => + constraints.map { + case Constraint.ContentMediaType(value, _) => Constraint.ContentMediaType(value, Some(encoding)) + case other => other + } + case None => constraints + } + + val 
additionalItemsIndex = newConstraints.indexWhere { + case Constraint.AdditionalItems(_, _) => true + case _ => false + } + + val constraintsWithAdditionalItems = if (additionalItemsIndex == -1) { + newConstraints + } else { + val itemsTupleSize = newConstraints.collectFirst { + case Constraint.ItemsTuple(schemas) => schemas.size + } + + itemsTupleSize match { + case Some(size) => + val newValue = newConstraints(additionalItemsIndex) match { + case Constraint.AdditionalItems(schema, _) => Constraint.AdditionalItems(schema, size) + case other => other + } + newConstraints.updated(additionalItemsIndex, newValue) + case None => + newConstraints.patch(additionalItemsIndex, Nil, 1) + } + } + + val additionalPropertiesIndex = constraintsWithAdditionalItems.indexWhere { + case Constraint.AdditionalProperties(_, _, _) => true + case _ => false + } + + if (additionalPropertiesIndex == -1) { + constraintsWithAdditionalItems + } else { + val maybeKnownNames = constraintsWithAdditionalItems.collectFirst { + case Constraint.Properties(values) => values.map(_.name).toSet + } + + val maybePatterns = constraintsWithAdditionalItems.collectFirst { + case Constraint.PatternProperties(values) => values.map(_._1).toSet + } + + if (maybeKnownNames.isEmpty && maybePatterns.isEmpty) { + constraintsWithAdditionalItems + } else { + constraintsWithAdditionalItems(additionalPropertiesIndex) match { + case original @ Constraint.AdditionalProperties(_, _, _) => + val withKnownNames = maybeKnownNames.fold(original)(knownNames => original.copy(knownNames = knownNames)) + val withPatterns = + maybePatterns.fold(withKnownNames)(patterns => withKnownNames.copy(patterns = patterns)) + constraintsWithAdditionalItems.updated(additionalPropertiesIndex, withPatterns) + case _ => constraintsWithAdditionalItems + } + } + } + } + + private val decodeBooleanSchema: Decoder[Schema[URI]] = + Decoder[Boolean].map(value => if (value) AcceptAll else RejectAll) + + private val decodeRef: Decoder[Schema[URI]] = Decoder[HCursor].product(Decoder[String].at("$ref")).emap { + case (cursor, value) => + try { + Right(Ref(cursor, new URI(value)): Schema[URI]) + } catch { + case e: URISyntaxException => Left(e.getReason) + } + } + + private lazy val decodeConstraints: Decoder[Schema[URI]] = + Decoder[Metadata].product(Constraint.decodeConstraints).map { + case (metadata, constraints) => + Constraints(metadata, postProcessConstraints(constraints)) + } + + implicit lazy val decodeSchema: Decoder[Schema[URI]] = decodeRef.or(decodeConstraints).or(decodeBooleanSchema) +} diff --git a/schema/src/main/scala/io/circe/schema/Type.scala b/schema/src/main/scala/io/circe/schema/Type.scala new file mode 100644 index 0000000..041d6a5 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Type.scala @@ -0,0 +1,43 @@ +package io.circe.schema + +import io.circe.{Decoder, Encoder} + +sealed trait Type { + def name: String +} + +object Type { + case object String extends Type { + def name: java.lang.String = "string" + } + + case object Integer extends Type { + def name: java.lang.String = "integer" + } + + case object Number extends Type { + def name: java.lang.String = "number" + } + + case object Object extends Type { + def name: java.lang.String = "object" + } + + case object Array extends Type { + def name: java.lang.String = "array" + } + + case object Boolean extends Type { + def name: java.lang.String = "boolean" + } + + case object Null extends Type { + def name: java.lang.String = "null" + } + + private val byName: Map[java.lang.String, Type] = + 
List(String, Integer, Number, Object, Array, Boolean, Null).map(tpe => (tpe.name, tpe)).toMap + + implicit val decodeType: Decoder[Type] = Decoder[java.lang.String].emap(value => byName.get(value).toRight("Type")) + implicit val encodeType: Encoder[Type] = Encoder[java.lang.String].contramap(_.name) +} diff --git a/schema/src/main/scala/io/circe/schema/Version.scala b/schema/src/main/scala/io/circe/schema/Version.scala new file mode 100644 index 0000000..3d4d786 --- /dev/null +++ b/schema/src/main/scala/io/circe/schema/Version.scala @@ -0,0 +1,35 @@ +package io.circe.schema + +import cats.kernel.Order +import io.circe.Decoder +import java.net.URI + +sealed trait Version { + def uri: URI +} + +object Version { + case object Draft4 extends Version { + val uri: URI = new URI("http://json-schema.org/draft-04/schema#") + } + + case object Draft6 extends Version { + val uri: URI = new URI("http://json-schema.org/draft-06/schema#") + } + + case object Draft7 extends Version { + val uri: URI = new URI("http://json-schema.org/draft-07/schema#") + } + + case object Draft201909 extends Version { + val uri: URI = new URI("http://json-schema.org/draft-2019-09/schema#") + } + + implicit val versionOrder: Order[Version] = Order.by(_.uri.toString) + implicit val versionOrdering: Ordering[Version] = versionOrder.toOrdering + + private val byURI: Map[String, Version] = + List(Draft4, Draft6, Draft7, Draft201909).map(version => (version.uri.toString, version)).toMap + + implicit val decodeVersion: Decoder[Version] = Decoder[String].emap(value => byURI.get(value).toRight("Version")) +} diff --git a/tests b/tests new file mode 160000 index 0000000..8daea3f --- /dev/null +++ b/tests @@ -0,0 +1 @@ +Subproject commit 8daea3f47e52526518cc2e88ef5d25d8a7070e3c diff --git a/validation/src/main/scala/io/circe/schema/validation/Compiler.scala b/validation/src/main/scala/io/circe/schema/validation/Compiler.scala new file mode 100644 index 0000000..144e31e --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/Compiler.scala @@ -0,0 +1,519 @@ +package io.circe.schema.validation + +import cats.kernel.Eq +import io.circe.{HCursor, Json, JsonNumber, JsonObject} +import io.circe.schema.{Constraint, Dependency, Property, Schema, Type} +import io.circe.schema.validation.{FormatValidator => ValidationFormatValidator} +import java.util.regex.Pattern + +trait Compiler { + final def apply(schema: Schema.Resolved): Validator = schema match { + case Schema.Constraints(_, constraints) => + if (constraints.size == 1) compile(constraints(0)) else Validator.Combined(constraints.map(compile)) + case Schema.AcceptAll => Validator.AcceptAll + case Schema.RejectAll => Validator.RejectAll + case Schema.Link(schema) => apply(schema.value) + case Schema.Ref(_, nothing) => nothing + } + + def compile(constraint: Constraint[Schema.Resolved]): Validator +} + +object Compiler { + val default: Compiler = apply() + def apply( + config: Configuration = Configuration.default, + formatValidator: ValidationFormatValidator = ValidationFormatValidator.default, + mediaTypeValidator: MediaTypeValidator = MediaTypeValidator.default, + encodingValidator: EncodingValidator = EncodingValidator.default + )(implicit eqJson: Eq[Json]): Compiler = + new Impl(config, formatValidator, mediaTypeValidator, encodingValidator, eqJson) + + private class Impl( + config: Configuration, + val formatValidator: ValidationFormatValidator, + val mediaTypeValidator: MediaTypeValidator, + val encodingValidator: EncodingValidator, + val eqJson: Eq[Json] + ) 
extends Compiler { + def compile(constraint: Constraint[Schema.Resolved]): Validator = constraint match { + case Constraint.Types(values) => new TypesValidator(values) + case Constraint.Enumeration(values) => new EnumerationValidator(values, eqJson) + case Constraint.AllOf(schemas) => new AllOfValidator(schemas, this) + case Constraint.AnyOf(schemas) => new AnyOfValidator(schemas, this) + case Constraint.OneOf(schemas) => new OneOfValidator(schemas, this) + case Constraint.Not(schema) => new NotValidator(schema, this) + case c @ Constraint.Minimum(_) => new MinimumValidator(c) + case c @ Constraint.Maximum(_) => new MaximumValidator(c) + case c @ Constraint.ExclusiveMinimum(_) => new ExclusiveMinimumValidator(c) + case c @ Constraint.ExclusiveMaximum(_) => new ExclusiveMaximumValidator(c) + case c @ Constraint.MultipleOf(_) => new MultipleOfValidator(c, config.multipleOfBigDecimal) + case c @ Constraint.Pattern(_) => new PatternValidator(c) + case c @ Constraint.Format(_) => new FormatValidator(c, formatValidator) + case c @ Constraint.ContentMediaType(_, _) => + new ContentMediaTypeValidator(c, mediaTypeValidator, encodingValidator) + case c @ Constraint.ContentEncoding(_) => new ContentEncodingValidator(c, encodingValidator) + case c @ Constraint.MinLength(_) => new MinLengthValidator(c) + case c @ Constraint.MaxLength(_) => new MaxLengthValidator(c) + case Constraint.UniqueItems(false) => Validator.AcceptAll + case c @ Constraint.UniqueItems(true) => new UniqueItemsValidator(c, eqJson) + case c @ Constraint.MinItems(_) => new MinItemsValidator(c) + case c @ Constraint.MaxItems(_) => new MaxItemsValidator(c) + case c @ Constraint.Contains(_) => new ContainsValidator(c, this) + case Constraint.ItemsType(schema) => new ItemsTypeValidator(schema, 0, this) + case Constraint.ItemsTuple(schemas) => new ItemsTupleValidator(schemas, this) + case Constraint.AdditionalItems(schema, startIndex) => new ItemsTypeValidator(schema, startIndex, this) + case Constraint.Properties(values) => new PropertiesValidator(values, this) + case Constraint.Required(names) => new RequiredValidator(names) + case Constraint.PropertyNames(schema) => new PropertyNamesValidator(schema, this) + case Constraint.MinProperties(limit) => new MinPropertiesValidator(limit) + case Constraint.MaxProperties(limit) => new MaxPropertiesValidator(limit) + case Constraint.PatternProperties(values) => new PatternPropertiesValidator(values, this) + case Constraint.AdditionalProperties(schema, knownNames, patterns) => + new AdditionalPropertiesValidator(schema, knownNames, patterns, this) + case Constraint.Dependencies(values) => new DependenciesValidator(values, this) + case Constraint.Conditional(ifSchema, thenSchema, elseSchema) => + new ConditionalValidator(ifSchema, thenSchema, elseSchema, this) + } + } + + private final class TypesValidator(values: Vector[Type]) extends Validator { + private[this] val acceptArray: Boolean = values.contains(Type.Array) + private[this] val acceptBoolean: Boolean = values.contains(Type.Boolean) + private[this] val acceptInteger: Boolean = values.contains(Type.Integer) + private[this] val acceptNull: Boolean = values.contains(Type.Null) + private[this] val acceptNumber: Boolean = values.contains(Type.Number) + private[this] val acceptObject: Boolean = values.contains(Type.Object) + private[this] val acceptString: Boolean = values.contains(Type.String) + + private[this] val folder: Json.Folder[Boolean] = new Json.Folder[Boolean] { + def onArray(value: Vector[Json]): Boolean = acceptArray + def 
onBoolean(value: Boolean): Boolean = acceptBoolean + def onNull: Boolean = acceptNull + def onObject(value: JsonObject): Boolean = acceptObject + def onString(value: String): Boolean = acceptString + def onNumber(value: JsonNumber): Boolean = if (acceptNumber) { + true + } else if (acceptInteger) { + value.toBigDecimal.exists(_.isWhole) + } else { + false + } + } + + def apply(c: HCursor): Vector[ValidationError] = if (c.value.foldWith(folder)) { + Vector.empty + } else { + Vector(TypeError(c.history, values)) + } + } + + private final class EnumerationValidator(values: Vector[Json], eqJson: Eq[Json]) extends Validator { + def apply(c: HCursor): Vector[ValidationError] = if (values.exists(eqJson.eqv(c.value, _))) { + Vector.empty + } else { + Vector(EnumerationError(c.history, values.toList)) + } + } + + private final class AllOfValidator(schemas: Vector[Schema.Resolved], compiler: Compiler) extends Validator { + private[this] val compiled: Vector[Validator] = schemas.map(compiler(_)) + + def apply(c: HCursor): Vector[ValidationError] = compiled.flatMap(_(c)) + } + + private final class AnyOfValidator(schemas: Vector[Schema.Resolved], compiler: Compiler) extends Validator { + private[this] val compiled: Vector[(Schema.Resolved, Validator)] = schemas.map(schema => (schema, compiler(schema))) + + def apply(c: HCursor): Vector[ValidationError] = { + val results = compiled.map { + case (schema, validator) => (schema, validator(c)) + } + + if (results.exists(_._2.isEmpty)) { + Vector.empty + } else { + Vector(AnyOfError(c.history, results)) + } + } + } + + private final class OneOfValidator(schemas: Vector[Schema.Resolved], compiler: Compiler) extends Validator { + private[this] val compiled: Vector[(Schema.Resolved, Validator)] = schemas.map(schema => (schema, compiler(schema))) + + def apply(c: HCursor): Vector[ValidationError] = { + val results = compiled.map { + case (schema, validator) => (schema, validator(c)) + } + + if (results.count(_._2.isEmpty) == 1) { + Vector.empty + } else { + Vector(OneOfError(c.history, results)) + } + } + } + + private final class NotValidator(schema: Schema.Resolved, compiler: Compiler) extends Validator { + private[this] val compiled: Validator = compiler(schema) + + def apply(c: HCursor): Vector[ValidationError] = if (compiled(c).nonEmpty) { + Vector.empty + } else { + Vector(NotError(c.history, schema)) + } + } + + private abstract class NumericConstraintValidator(constraint: Constraint.NumericConstraint) extends Validator { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean + + final def apply(c: HCursor): Vector[ValidationError] = c.value.asNumber match { + case Some(asNumber) => + asNumber.toBigDecimal match { + case Some(asBigDecimal) => + if (check(asBigDecimal, constraint.value)) { + Vector.empty + } else { + Vector(NumericConstraintError(c.history, Some(asBigDecimal), constraint)) + } + case None => Vector(NumericConstraintError(c.history, None, constraint)) + } + case None => Vector.empty + } + } + + private final class MinimumValidator(constraint: Constraint.NumericConstraint) + extends NumericConstraintValidator(constraint) { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean = + actualValue >= constraintValue + } + + private final class MaximumValidator(constraint: Constraint.NumericConstraint) + extends NumericConstraintValidator(constraint) { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean = + actualValue <= constraintValue 
+ } + + private final class ExclusiveMinimumValidator(constraint: Constraint.NumericConstraint) + extends NumericConstraintValidator(constraint) { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean = + actualValue > constraintValue + } + + private final class ExclusiveMaximumValidator(constraint: Constraint.NumericConstraint) + extends NumericConstraintValidator(constraint) { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean = + actualValue < constraintValue + } + + private final class MultipleOfValidator(constraint: Constraint.NumericConstraint, multipleOfBigDecimal: Boolean) + extends NumericConstraintValidator(constraint) { + protected[this] def check(actualValue: BigDecimal, constraintValue: BigDecimal): Boolean = { + val result = (actualValue / constraintValue).isWhole + + if (multipleOfBigDecimal) { + result + } else { + result && !(actualValue.doubleValue / constraintValue.doubleValue).isInfinity + } + } + } + + private abstract class StringConstraintValidator(constraint: Constraint.StringConstraint) extends Validator { + protected[this] def check(actualValue: String): Boolean + + final def apply(c: HCursor): Vector[ValidationError] = c.value.asString match { + case Some(asString) => + if (check(asString)) { + Vector.empty + } else { + Vector(StringConstraintError(c.history, asString, constraint)) + } + + case None => Vector.empty + } + } + + private final class PatternValidator(constraint: Constraint.Pattern) extends StringConstraintValidator(constraint) { + val compiled = Pattern.compile(constraint.regex) + + protected[this] def check(actualValue: String): Boolean = compiled.matcher(actualValue).find + } + + private final class FormatValidator(constraint: Constraint.Format, formatValidator: ValidationFormatValidator) + extends StringConstraintValidator(constraint) { + protected[this] def check(actualValue: String): Boolean = formatValidator.isValid(constraint.format, actualValue) + } + + private final class ContentMediaTypeValidator( + constraint: Constraint.ContentMediaType, + mediaTypeValidator: MediaTypeValidator, + encodingValidator: EncodingValidator + ) extends StringConstraintValidator(constraint) { + protected[this] def check(actualValue: String): Boolean = + mediaTypeValidator.isValid( + constraint.value, + constraint.encoding.flatMap(encodingValidator.decode(_, actualValue)).getOrElse(actualValue) + ) + } + + private final class ContentEncodingValidator( + constraint: Constraint.ContentEncoding, + encodingValidator: EncodingValidator + ) extends StringConstraintValidator(constraint) { + protected[this] def check(actualValue: String): Boolean = encodingValidator.isValid(constraint.value, actualValue) + } + + private final class MinLengthValidator(constraint: Constraint.MinLength) + extends StringConstraintValidator(constraint) { + protected[this] def check(actualValue: String): Boolean = + actualValue.codePointCount(0, actualValue.length) >= constraint.value + } + + private final class MaxLengthValidator(constraint: Constraint.MaxLength) + extends StringConstraintValidator(constraint) { + protected[this] def check(actualValue: String): Boolean = + actualValue.codePointCount(0, actualValue.length) <= constraint.value + } + + private final class UniqueItemsValidator(constraint: Constraint.ArrayConstraint[Schema.Resolved], eqJson: Eq[Json]) + extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + val values = 
valuesIterable.toVector + val unique = 0.until(values.length).forall { i => + (i + 1).until(values.length).forall { j => + eqJson.neqv(values(i), values(j)) + } + } + + if (unique) { + Vector.empty + } else { + Vector(ArrayConstraintError(c.history, constraint)) + } + case None => Vector.empty + } + } + + private final class MinItemsValidator(constraint: Constraint.MinItems) extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + if (valuesIterable.size >= constraint.value) { + Vector.empty + } else { + Vector(ArrayConstraintError(c.history, constraint)) + } + case None => Vector.empty + } + } + + private final class MaxItemsValidator(constraint: Constraint.MaxItems) extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + if (valuesIterable.size <= constraint.value) { + Vector.empty + } else { + Vector(ArrayConstraintError(c.history, constraint)) + } + case None => Vector.empty + } + } + + private final class ContainsValidator(constraint: Constraint.Contains[Schema.Resolved], compiler: Compiler) + extends Validator { + private[this] val compiled: Validator = compiler(constraint.schema) + + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + if (valuesIterable.toVector.exists(compiled.isValid)) { + Vector.empty + } else { + Vector(ArrayConstraintError(c.history, constraint)) + } + case None => Vector.empty + } + } + + private final class ItemsTypeValidator(schema: Schema.Resolved, startIndex: Int, compiler: Compiler) + extends Validator { + private[this] val compiled: Validator = compiler(schema) + + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + startIndex.until(valuesIterable.size).flatMap(n => c.downN(n).success).flatMap(compiled).toVector + case None => Vector.empty + } + } + + private final class ItemsTupleValidator(schemas: Vector[Schema.Resolved], compiler: Compiler) extends Validator { + private[this] val compiledWithIndices: Vector[(Validator, Int)] = schemas.map(compiler(_)).zipWithIndex + + final def apply(c: HCursor): Vector[ValidationError] = c.values match { + case Some(valuesIterable) => + compiledWithIndices.flatMap { + case (validator, n) => + val ac = c.downN(n) + ac.success match { + case None => Vector.empty + case Some(cc) => validator(cc) + } + } + case None => Vector.empty + } + } + + private final class PropertiesValidator(values: Vector[Property[Schema.Resolved]], compiler: Compiler) + extends Validator { + private[this] val compiled: Map[String, Validator] = values.map { + case Property(name, schema) => (name, compiler(schema)) + }.toMap + + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + keys.flatMap { key => + compiled.get(key) match { + case Some(validator) => + c.downField(key).success match { + case Some(cc) => validator(cc) + case None => Vector.empty + } + case None => Vector.empty + } + }.toVector + case None => Vector.empty + } + } + + private final class RequiredValidator(names: Vector[String]) extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => names.filterNot(keys.toSet).map(ObjectRequiredPropertyError(c.history, _)) + case None => Vector.empty + } + } + + private final class PropertyNamesValidator(schema: Schema.Resolved, compiler: Compiler) extends Validator { + private[this] val compiled: Validator 
= compiler(schema) + + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + keys.flatMap(name => compiled(Json.fromString(name).hcursor).map(_.withHistory(c.history))).toVector + case None => Vector.empty + } + } + + private final class MinPropertiesValidator(limit: Int) extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + val size = keys.size + + if (size >= limit) { + Vector.empty + } else { + Vector(ObjectMinPropertiesError(c.history, size, limit)) + } + case None => Vector.empty + } + } + + private final class MaxPropertiesValidator(limit: Int) extends Validator { + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + val size = keys.size + + if (size <= limit) { + Vector.empty + } else { + Vector(ObjectMaxPropertiesError(c.history, size, limit)) + } + case None => Vector.empty + } + } + + private final class PatternPropertiesValidator(values: Vector[(String, Schema.Resolved)], compiler: Compiler) + extends Validator { + private[this] val compiled: Vector[(Pattern, Validator)] = + values.map(p => (Pattern.compile(p._1), compiler(p._2))) + + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + keys.flatMap { key => + compiled.flatMap { + case (pattern, validator) => + if (pattern.matcher(key).find) { + c.downField(key).success match { + case Some(cc) => validator(cc) + case None => Vector.empty + } + } else { + Vector.empty + } + } + }.toVector + case None => Vector.empty + } + } + + private final class AdditionalPropertiesValidator( + schema: Schema.Resolved, + knownNames: Set[String], + patterns: Set[String], + compiler: Compiler + ) extends Validator { + private[this] val compiled: Validator = compiler(schema) + private[this] val compiledPatterns = patterns.map(Pattern.compile) + + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + keys + .filterNot(key => knownNames(key) || compiledPatterns.exists(_.matcher(key).find)) + .flatMap { key => + c.downField(key).success match { + case Some(cc) => compiled(cc) + case None => Vector.empty + } + } + .toVector + case None => Vector.empty + } + } + + private final class DependenciesValidator(dependencies: Vector[Dependency[Schema.Resolved]], compiler: Compiler) + extends Validator { + import cats.syntax.functor._ + + private[this] val compiled: Vector[Dependency[Validator]] = dependencies.map(_.map(compiler(_))) + + final def apply(c: HCursor): Vector[ValidationError] = c.keys match { + case Some(keys) => + val keySet = keys.toSet + + compiled.filter(dependency => keySet(dependency.name)).flatMap { + case Dependency.Property(_, required) => + required.filterNot(keySet).map(ObjectRequiredPropertyError(c.history, _)) + case Dependency.Schema(_, validator) => validator(c) + } + + case None => Vector.empty + } + } + + private final class ConditionalValidator( + ifSchema: Schema.Resolved, + thenSchema: Option[Schema.Resolved], + elseSchema: Option[Schema.Resolved], + compiler: Compiler + ) extends Validator { + private[this] val compiledIf: Validator = compiler(ifSchema) + private[this] val compiledThen = thenSchema.map(compiler(_)) + private[this] val compiledElse = elseSchema.map(compiler(_)) + + final def apply(c: HCursor): Vector[ValidationError] = { + if (compiledIf(c).isEmpty) { + compiledThen.fold(Vector.empty[ValidationError])(_(c)) + } else { + compiledElse.fold(Vector.empty[ValidationError])(_(c)) + } + } + } +} diff 
--git a/validation/src/main/scala/io/circe/schema/validation/Configuration.scala b/validation/src/main/scala/io/circe/schema/validation/Configuration.scala new file mode 100644 index 0000000..abe5ab1 --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/Configuration.scala @@ -0,0 +1,7 @@ +package io.circe.schema.validation + +case class Configuration(multipleOfBigDecimal: Boolean) + +object Configuration { + val default: Configuration = Configuration(false) +} diff --git a/validation/src/main/scala/io/circe/schema/validation/EncodingValidator.scala b/validation/src/main/scala/io/circe/schema/validation/EncodingValidator.scala new file mode 100644 index 0000000..5b5a2b2 --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/EncodingValidator.scala @@ -0,0 +1,26 @@ +package io.circe.schema.validation + +import io.circe.schema.Encoding +import java.util.Base64 +import scala.util.Try + +trait EncodingValidator { + def isValid(encoding: Encoding, input: String): Boolean + def decode(encoding: Encoding, input: String): Option[String] +} + +object EncodingValidator { + val default: EncodingValidator = new DefaultEncodingValidator() + + private class DefaultEncodingValidator extends EncodingValidator { + final def isValid(encoding: Encoding, input: String): Boolean = encoding match { + case Encoding.Base64 => Try(Base64.getDecoder().decode(input)).isSuccess + case _ => true + } + + final def decode(encoding: Encoding, input: String): Option[String] = encoding match { + case Encoding.Base64 => Try(Base64.getDecoder().decode(input)).toOption.map(new String(_)) + case _ => Some(input) + } + } +} diff --git a/validation/src/main/scala/io/circe/schema/validation/FormatValidator.scala b/validation/src/main/scala/io/circe/schema/validation/FormatValidator.scala new file mode 100644 index 0000000..07c8d2e --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/FormatValidator.scala @@ -0,0 +1,32 @@ +package io.circe.schema.validation + +import io.circe.schema.Format +import java.net.URI +import java.time.{LocalDate, OffsetDateTime, OffsetTime} +import java.util.regex.Pattern +import scala.util.Try + +trait FormatValidator { + def isValid(format: Format, input: String): Boolean +} + +object FormatValidator { + val default: FormatValidator = new DefaultFormatValidator() + + private val emailPattern: Pattern = Pattern.compile( + """|\A[a-z0-9!#$%&'*+/=?^_‘{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_‘{|}~-]+)* + |@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\z + |""".stripMargin.replaceAll("\n", "") + ) + + private class DefaultFormatValidator extends FormatValidator { + final def isValid(format: Format, input: String): Boolean = format match { + case Format.DateTime => Try(OffsetDateTime.parse(input)).isSuccess + case Format.Date => Try(LocalDate.parse(input)).isSuccess + case Format.Time => Try(OffsetTime.parse(input)).isSuccess + case Format.Uri => Try(new URI(input)).isSuccess + case Format.Email => emailPattern.matcher(input).matches + case _ => true + } + } +} diff --git a/validation/src/main/scala/io/circe/schema/validation/MediaTypeValidator.scala b/validation/src/main/scala/io/circe/schema/validation/MediaTypeValidator.scala new file mode 100644 index 0000000..0f31835 --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/MediaTypeValidator.scala @@ -0,0 +1,18 @@ +package io.circe.schema.validation + +import io.circe.parser + +trait MediaTypeValidator { + def isValid(mediaType: String, input: String): Boolean +} + 
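// (Editor's note: illustrative sketch, not part of this patch.) The default
// implementation below only knows how to verify "application/json"; every other
// contentMediaType is accepted as-is. A user-supplied MediaTypeValidator can be
// passed to Compiler.apply to tighten that. A minimal sketch, assuming only the
// trait above (the name jsonOnly is invented for the example):
//
//   val jsonOnly: MediaTypeValidator = new MediaTypeValidator {
//     def isValid(mediaType: String, input: String): Boolean = mediaType match {
//       case "application/json" => io.circe.parser.parse(input).isRight
//       case _                  => false // reject media types we cannot verify
//     }
//   }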
+object MediaTypeValidator { + val default: MediaTypeValidator = new DefaultMediaTypeValidator() + + private class DefaultMediaTypeValidator extends MediaTypeValidator { + final def isValid(mediaType: String, input: String): Boolean = mediaType match { + case "application/json" => parser.parse(input).isRight + case _ => true + } + } +} diff --git a/validation/src/main/scala/io/circe/schema/validation/ValidationError.scala b/validation/src/main/scala/io/circe/schema/validation/ValidationError.scala new file mode 100644 index 0000000..310eb3b --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/ValidationError.scala @@ -0,0 +1,121 @@ +package io.circe.schema.validation + +import io.circe.{CursorOp, Json} +import io.circe.schema.{Schema, Type} + +sealed abstract class ValidationError extends Exception { + def history: List[CursorOp] + + final def withHistory(newHistory: List[CursorOp]): ValidationError = this match { + case e @ EnumerationError(_, _) => e.copy(history = newHistory) + case e @ TypeError(_, _) => e.copy(history = newHistory) + case e @ RejectAllError(_) => e.copy(history = newHistory) + case e @ ArrayAdditionalItemError(_, _) => e.copy(history = newHistory) + case e @ ObjectRequiredPropertyError(_, _) => e.copy(history = newHistory) + case e @ ObjectMinPropertiesError(_, _, _) => e.copy(history = newHistory) + case e @ ObjectMaxPropertiesError(_, _, _) => e.copy(history = newHistory) + case e @ ObjectAdditionalPropertyError(_, _) => e.copy(history = newHistory) + case e @ AnyOfError(_, _) => e.copy(history = newHistory) + case e @ OneOfError(_, _) => e.copy(history = newHistory) + case e @ NotError(_, _) => e.copy(history = newHistory) + case e @ NumericConstraintError(_, _, _) => e.copy(history = newHistory) + case e @ StringConstraintError(_, _, _) => e.copy(history = newHistory) + case e @ ArrayConstraintError(_, _) => e.copy(history = newHistory) + } +} + +case class EnumerationError(history: List[CursorOp], expected: List[Json]) extends ValidationError { + final override def getMessage(): String = + s"Expected one of [${expected.map(_.noSpaces).mkString(", ")}]" +} + +case class TypeError(history: List[CursorOp], expected: Vector[Type]) extends ValidationError { + final override def getMessage(): String = s"Expected type to be one of ${expected.mkString(", ")}" +} + +case class RejectAllError(history: List[CursorOp]) extends ValidationError { + final override def getMessage(): String = s"Schema rejects all values" +} + +case class ArrayAdditionalItemError(history: List[CursorOp], index: Int) extends ValidationError { + final override def getMessage(): String = s"Invalid additional array item at index ${index}" +} + +case class ObjectRequiredPropertyError(history: List[CursorOp], name: String) extends ValidationError { + final override def getMessage(): String = s"Expected object with property ${name}" +} + +case class ObjectMinPropertiesError(history: List[CursorOp], length: Int, n: Int) extends ValidationError { + final override def getMessage(): String = s"Expected object with >= ${n} properties, got ${length}" +} + +case class ObjectMaxPropertiesError(history: List[CursorOp], length: Int, n: Int) extends ValidationError { + final override def getMessage(): String = s"Expected object with <= ${n} properties, got ${length}" +} + +case class ObjectAdditionalPropertyError(history: List[CursorOp], name: String) extends ValidationError { + final override def getMessage(): String = s"Invalid additional object property ${name}" +} + +case class AnyOfError( + 
history: List[CursorOp], + errors: Vector[(Schema.Resolved, Vector[ValidationError])] +) extends ValidationError { + final override def getMessage(): String = "Expected at least one schema to match" +} + +case class OneOfError( + history: List[CursorOp], + errors: Vector[(Schema.Resolved, Vector[ValidationError])] +) extends ValidationError { + final override def getMessage(): String = "Expected exactly one schema to match" +} + +case class NotError(history: List[CursorOp], schema: Schema.Resolved) extends ValidationError { + final override def getMessage(): String = "Expected schema to fail" +} + +case class NumericConstraintError( + history: List[CursorOp], + number: Option[BigDecimal], + constraint: io.circe.schema.Constraint.NumericConstraint +) extends ValidationError { + private[this] val asString: String = + number.fold("a number that cannot be represented as a BigDecimal value")(_.toString) + + final override def getMessage(): String = constraint match { + case io.circe.schema.Constraint.Minimum(target) => s"Expected number >= ${target}, got ${asString}" + case io.circe.schema.Constraint.Maximum(target) => s"Expected number <= ${target}, got ${asString}" + case io.circe.schema.Constraint.ExclusiveMinimum(target) => s"Expected number > ${target}, got ${asString}" + case io.circe.schema.Constraint.ExclusiveMaximum(target) => s"Expected number < ${target}, got ${asString}" + case io.circe.schema.Constraint.MultipleOf(target) => s"Expected a multiple of ${target}, got ${asString}" + } +} + +case class StringConstraintError( + history: List[CursorOp], + value: String, + constraint: io.circe.schema.Constraint.StringConstraint +) extends ValidationError { + final override def getMessage(): String = constraint match { + case io.circe.schema.Constraint.Pattern(regex) => s"Expected string matching ${regex}, got ${value}" + case io.circe.schema.Constraint.Format(format) => s"Expected ${format.name} string, got ${value}" + case io.circe.schema.Constraint.ContentMediaType(mediaType, _) => s"Expected ${mediaType} string, got ${value}" + case io.circe.schema.Constraint.ContentEncoding(encoding) => s"Expected ${encoding.name} string, got ${value}" + case io.circe.schema.Constraint.MinLength(n) => s"Expected string with length >= ${n}, got ${value}" + case io.circe.schema.Constraint.MaxLength(n) => s"Expected string with length <= ${n}, got ${value}" + } +} + +case class ArrayConstraintError( + history: List[CursorOp], + constraint: io.circe.schema.Constraint.ArrayConstraint[Schema.Resolved] +) extends ValidationError { + final override def getMessage(): String = constraint match { + case io.circe.schema.Constraint.UniqueItems(false) => "Unexpected error" + case io.circe.schema.Constraint.UniqueItems(true) => "Expected unique values in array" + case io.circe.schema.Constraint.MinItems(n) => s"Expected array with length >= ${n}" + case io.circe.schema.Constraint.MaxItems(n) => s"Expected array with length <= ${n}" + case io.circe.schema.Constraint.Contains(schema) => "Expected at least one array value to match schema" + } +} diff --git a/validation/src/main/scala/io/circe/schema/validation/Validator.scala b/validation/src/main/scala/io/circe/schema/validation/Validator.scala new file mode 100644 index 0000000..17e6f8d --- /dev/null +++ b/validation/src/main/scala/io/circe/schema/validation/Validator.scala @@ -0,0 +1,19 @@ +package io.circe.schema.validation + +import io.circe.{HCursor, Json} + +trait Validator extends (HCursor => Vector[ValidationError]) { + final def isValid(value: Json): Boolean = 
apply(value.hcursor).isEmpty +} + +object Validator { + object AcceptAll extends Validator { + def apply(c: HCursor): Vector[ValidationError] = Vector.empty + } + object RejectAll extends Validator { + def apply(c: HCursor): Vector[ValidationError] = Vector(RejectAllError(c.history)) + } + case class Combined(validators: Vector[Validator]) extends Validator { + def apply(c: HCursor): Vector[ValidationError] = validators.flatMap(_(c)) + } +} diff --git a/validation/src/test/scala/io/circe/schema/validation/TestSuiteTests.scala b/validation/src/test/scala/io/circe/schema/validation/TestSuiteTests.scala new file mode 100644 index 0000000..ed628e9 --- /dev/null +++ b/validation/src/test/scala/io/circe/schema/validation/TestSuiteTests.scala @@ -0,0 +1,115 @@ +package io.circe.schema.validation + +import cats.syntax.apply._ +import io.circe.{Decoder, Json} +import io.circe.schema.{Resolver, Schema} +import java.io.File +import java.net.URI +import munit.FunSuite + +case class SchemaTestCase(description: String, data: Json, valid: Boolean) +case class SchemaTest(description: String, schema: Schema[URI], tests: List[SchemaTestCase]) {} + +object SchemaTestCase { + implicit val decodeSchemaTestCase: Decoder[SchemaTestCase] = io.circe.generic.semiauto.deriveDecoder +} + +object SchemaTest { + implicit val decodeSchemaTest: Decoder[SchemaTest] = ( + Decoder[String].at("description"), + // We have to reset the cursor because the references in the tests are actually wrong. + Decoder[Schema[URI]].prepare { c => + val cs = c.downField("schema") + cs.success.fold(cs)(_.value.hcursor) + }, + Decoder[List[SchemaTestCase]].at("tests") + ).mapN(SchemaTest(_, _, _)) +} + +abstract class TestSuiteTests(path: String, config: Configuration = Configuration.default) extends FunSuite { + val compiler: Compiler = Compiler(config) + + val tests: List[SchemaTest] = io.circe.jawn.decodeFile[List[SchemaTest]](new File(path)) match { + case Right(value) => value + case Left(error) => throw error + } + + val deduplicated = tests + .foldLeft((Set.empty[String], List.empty[SchemaTest])) { + case ((seen, res), c @ SchemaTest(description, _, _)) => + (seen + description, if (seen(description)) res else c :: res) + } + ._2 + .reverse + + deduplicated.foreach { + case SchemaTest(description, schema, tests) => + tests.foreach { + case SchemaTestCase(caseDescription, data, valid) => + val expected = if (valid) "validate successfully" else "fail to validate" + test(s"$description: $caseDescription should $expected") { + val Right(resolvedSchema) = Resolver.local(schema).runA(Map.empty) + val errors = compiler(resolvedSchema.value)(data.hcursor) + + if (valid) { + assert(errors.isEmpty) + } else { + assert(errors.nonEmpty) + } + } + } + } +} + +class AdditionalItemsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/additionalItems.json") +class AdditionalPropertiesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/additionalProperties.json") +class AllOfTestSuiteTests extends TestSuiteTests("tests/tests/draft7/allOf.json") +class AnyOfTestSuiteTests extends TestSuiteTests("tests/tests/draft7/anyOf.json") +class BooleanSchemaTestSuiteTests extends TestSuiteTests("tests/tests/draft7/boolean_schema.json") +class ConstTestSuiteTests extends TestSuiteTests("tests/tests/draft7/const.json") +class ContainsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/contains.json") +class DefaultTestSuiteTests extends TestSuiteTests("tests/tests/draft7/default.json") +//class DefinitionsTestSuiteTests extends 
TestSuiteTests("tests/tests/draft7/definitions.json") +class DependenciesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/dependencies.json") +class EnumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/enum.json") +class ExclusiveMaximumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/exclusiveMaximum.json") +class ExclusiveMinimumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/exclusiveMinimum.json") +class FormatTestSuiteTests extends TestSuiteTests("tests/tests/draft7/format.json") +class IfThenElseTestSuiteTests extends TestSuiteTests("tests/tests/draft7/if-then-else.json") +class InfiniteLoopDetectionTestSuiteTests extends TestSuiteTests("tests/tests/draft7/infinite-loop-detection.json") +class ItemsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/items.json") +class MaximumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/maximum.json") +class MaxItemsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/maxItems.json") +class MaxLengthTestSuiteTests extends TestSuiteTests("tests/tests/draft7/maxLength.json") +class MaxPropertiesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/maxProperties.json") +class MinimumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/minimum.json") +class MinItemsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/minItems.json") +class MinLengthTestSuiteTests extends TestSuiteTests("tests/tests/draft7/minLength.json") +class MinPropertiesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/minProperties.json") +class MultipleOfTestSuiteTests extends TestSuiteTests("tests/tests/draft7/multipleOf.json") +class NotTestSuiteTests extends TestSuiteTests("tests/tests/draft7/not.json") +class OneOfTestSuiteTests extends TestSuiteTests("tests/tests/draft7/oneOf.json") +class PatternTestSuiteTests extends TestSuiteTests("tests/tests/draft7/pattern.json") +class PatternPropertiesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/patternProperties.json") +class PropertiesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/properties.json") +class PropertyNamesTestSuiteTests extends TestSuiteTests("tests/tests/draft7/propertyNames.json") +// Not currently running remote tests. 
+//class RefTestSuiteTests extends TestSuiteTests("tests/tests/draft7/ref.json")
+//class RefRemoteTestSuiteTests extends TestSuiteTests("tests/tests/draft7/refRemote.json")
+class RequiredTestSuiteTests extends TestSuiteTests("tests/tests/draft7/required.json")
+class TypeTestSuiteTests extends TestSuiteTests("tests/tests/draft7/type.json")
+class UniqueItemsTestSuiteTests extends TestSuiteTests("tests/tests/draft7/uniqueItems.json")
+
+class BignumTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/bignum.json")
+class ContentTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/content.json")
+//class EcmascriptRegexTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/ecmascript-regex.json")
+class NonBmpRegexTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/non-bmp-regex.json")
+class FloatOverflowTestSuiteTests
+  extends TestSuiteTests("tests/tests/draft7/optional/float-overflow.json", Configuration(true))
+
+class FormatDateTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/format/date.json")
+class FormatDateTimeTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/format/date-time.json")
+class FormatEmailTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/format/email.json")
+class FormatTimeTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/format/time.json")
+//class FormatUriTestSuiteTests extends TestSuiteTests("tests/tests/draft7/optional/format/uri.json")
+//class FormatIdnEmailTestSuiteTests extends TestSuiteTests("tests/tests/draft2019-09/optional/format/idn-email.json")
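Taken together, the modules in this patch are used much as in TestSuiteTests above: decode a Schema[URI], resolve its local references, compile the resolved schema, and run the resulting Validator over a document. The sketch below is illustrative only and not part of the patch: the schema literal and the names ValidationExample, schemaJson, good, and bad are invented, error handling is elided, and io.circe.jawn (already used by the test suite) is assumed to be available.

import io.circe.Json
import io.circe.schema.{Resolver, Schema}
import io.circe.schema.validation.Compiler
import java.net.URI

object ValidationExample {
  // A small schema, decoded with the Decoder[Schema[URI]] defined in this patch.
  val schemaJson: String =
    """{"type": "object", "required": ["name"], "properties": {"name": {"type": "string"}}}"""

  def main(args: Array[String]): Unit = {
    val Right(schema) = io.circe.jawn.decode[Schema[URI]](schemaJson)

    // Resolve local references, mirroring TestSuiteTests above.
    val Right(resolved) = Resolver.local(schema).runA(Map.empty)

    // Compile the resolved schema into a Validator (HCursor => Vector[ValidationError]).
    val validator = Compiler.default(resolved.value)

    val good = Json.obj("name" -> Json.fromString("circe"))
    val bad = Json.obj()

    println(validator.isValid(good))                                   // true
    validator(bad.hcursor).foreach(error => println(error.getMessage)) // reports the missing "name"
  }
}

Since Validator is just a function HCursor => Vector[ValidationError], an empty result means the document matched the schema; each error carries the cursor history of the failing location and a human-readable getMessage.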
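Compiler.apply also accepts a Configuration and pluggable format, media-type, and encoding validators; Configuration(true) is what FloatOverflowTestSuiteTests above uses to enable BigDecimal-based multipleOf checking. The second sketch below is likewise illustrative and not part of the patch: StrictCompilerExample and strictFormats are invented names, and the absolute-URI rule is only an example of overriding one format while delegating everything else to FormatValidator.default.

import io.circe.schema.Format
import io.circe.schema.validation.{Compiler, Configuration, FormatValidator}
import java.net.URI
import scala.util.Try

object StrictCompilerExample {
  // Accept "uri" values only when they parse as absolute URIs; all other
  // formats fall back to the default rules.
  val strictFormats: FormatValidator = new FormatValidator {
    def isValid(format: Format, input: String): Boolean = format match {
      case Format.Uri => Try(new URI(input)).toOption.exists(_.isAbsolute)
      case other      => FormatValidator.default.isValid(other, input)
    }
  }

  // BigDecimal-based multipleOf checking plus the custom format validator.
  val compiler: Compiler =
    Compiler(config = Configuration(multipleOfBigDecimal = true), formatValidator = strictFormats)
}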