diff --git a/.github/workflows/build_and_run.yml b/.github/workflows/build_and_run.yml
new file mode 100644
index 0000000..0062842
--- /dev/null
+++ b/.github/workflows/build_and_run.yml
@@ -0,0 +1,30 @@
+name: Build and Test
+
+on: push
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.10", "3.11"]
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ cache: "pip"
+ cache-dependency-path: "**/requirements.txt"
+
+ - name: Install dependencies
+ run: |
+ pip install -r requirements.txt
+
+ - name: Run pytest
+ run: |
+        pip install ".[test]"
+        pytest --cov=vo_models tests/
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..de5c1f9
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,39 @@
+# This workflow will upload a Python Package using Twine when a release is created
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+name: Upload Python Package
+
+on:
+ release:
+ types:
+ - published
+
+permissions:
+ contents: read
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install build
+ - name: Build package
+ run: python -m build
+ - name: Publish package
+ uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+ with:
+ user: ${{ secrets.PYPI_USERNAME_STSCI_MAINTAINER }}
+ password: ${{ secrets.PYPI_PASSWORD_STSCI_MAINTAINER }}
diff --git a/.gitignore b/.gitignore
index 7d44098..b43e30f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,6 +44,7 @@ nosetests.xml
coverage.xml
*,cover
.hypothesis/
+.pytest_cache/
# Translations
*.mo
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..3341150
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,635 @@
+[MAIN]
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Clear in-memory caches upon conclusion of linting. Useful if running pylint
+# in a server-like mode.
+clear-cache-post-run=no
+
+# Load and enable all available extensions. Use --list-extensions to see a list
+# all available extensions.
+#enable-all-extensions=
+
+# In error mode, messages with a category besides ERROR or FATAL are
+# suppressed, and no reports are done by default. Error mode is compatible with
+# disabling specific errors.
+#errors-only=
+
+# Always return a 0 (non-error) status code, even if lint errors are found.
+# This is primarily useful in continuous integration scripts.
+#exit-zero=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=lxml,
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold under which the program will exit with error.
+fail-under=10
+
+# Interpret the stdin as a python script, whose filename needs to be passed as
+# the module_or_package argument.
+#from-stdin=
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS
+
+# Add files or directories matching the regular expressions patterns to the
+# ignore-list. The regex matches against paths and can be in Posix or Windows
+# format. Because '\\' represents the directory delimiter on Windows systems,
+# it can't be used as an escape character.
+ignore-paths=
+
+# Files or directories matching the regular expression patterns are skipped.
+# The regex matches against base names, not paths. The default value ignores
+# Emacs file locks
+ignore-patterns=^\.#
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use, and will cap the count on Windows to
+# avoid hangs.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Minimum Python version to use for version dependent checks. Will default to
+# the version used to run pylint.
+py-version=3.10
+
+# Discover python modules and packages in the file system subtree.
+recursive=no
+
+# Add paths to the list of the source roots. Supports globbing patterns. The
+# source root is an absolute path or a path relative to the current working
+# directory used to determine a package namespace for modules located under the
+# source root.
+source-roots=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# In verbose mode, extra non-checker-related info will be displayed.
+#verbose=
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style. If left empty, argument names will be checked with the set
+# naming style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style. If left empty, attribute names will be checked with the set naming
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+bad-names-rgxs=
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style. If left empty, class attribute names will be checked
+# with the set naming style.
+#class-attribute-rgx=
+
+# Naming style matching correct class constant names.
+class-const-naming-style=UPPER_CASE
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style. If left empty, class constant names will be checked with
+# the set naming style.
+#class-const-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style. If left empty, class names will be checked with the set naming style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style. If left empty, constant names will be checked with the set naming
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style. If left empty, function names will be checked with the set
+# naming style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+good-names-rgxs=
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style. If left empty, inline iteration names will be checked
+# with the set naming style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style. If left empty, method names will be checked with the set naming style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style. If left empty, module names will be checked with the set naming style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Regular expression matching correct type alias names. If left empty, type
+# alias names will be checked with the set naming style.
+#typealias-rgx=
+
+# Regular expression matching correct type variable names. If left empty, type
+# variable names will be checked with the set naming style.
+#typevar-rgx=
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style. If left empty, variable names will be checked with the set
+# naming style.
+#variable-rgx=
+
+
+[CLASSES]
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ asyncSetUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[DESIGN]
+
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=120
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow explicit reexports by alias from a package __init__.
+allow-reexport-from-package=no
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=
+
+# Output a graph (.gv or any supported image format) of external dependencies
+# to the given file (report RP0402 must not be disabled).
+ext-import-graph=
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be
+# disabled).
+import-graph=
+
+# Output a graph (.gv or any supported image format) of internal dependencies
+# to the given file (report RP0402 must not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[LOGGING]
+
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
+# UNDEFINED.
+confidence=HIGH,
+ CONTROL_FLOW,
+ INFERENCE,
+ INFERENCE_FAILURE,
+ UNDEFINED
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then re-enable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=raw-checker-failed,
+ bad-inline-option,
+ locally-disabled,
+ file-ignored,
+ suppressed-message,
+ useless-suppression,
+ deprecated-pragma,
+ use-symbolic-message-instead,
+ use-implicit-booleaness-not-comparison-to-string,
+ use-implicit-booleaness-not-comparison-to-zero,
+ unspecified-encoding,
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+
+
+[METHOD_ARGS]
+
+# List of qualified names (i.e., library.method) which require a timeout
+# parameter e.g. 'requests.api.get,requests.api.post'
+timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+# Regular expression of note tags to take in consideration.
+notes-rgx=
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit,argparse.parse_error
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'fatal', 'error', 'warning', 'refactor',
+# 'convention', and 'info' which contain the number of messages in each
+# category, as well as 'statement' which is the total number of statements
+# analyzed. This score is used by the global evaluation report (RP0004).
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+msg-template=
+
+# Set the output format. Available formats are: text, parseable, colorized,
+# json2 (improved json format), json (old json format) and msvs (visual
+# studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+#output-format=
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[SIMILARITIES]
+
+# Comments are removed from the similarity computation
+ignore-comments=yes
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings=yes
+
+# Imports are removed from the similarity computation
+ignore-imports=yes
+
+# Signatures are removed from the similarity computation
+ignore-signatures=yes
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. No available dictionaries : You need to install
+# both the python package and the system dependency for enchant to work.
+spelling-dict=
+
+# List of comma separated words that should be considered directives if they
+# appear at the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=no
+
+# This flag controls whether the implicit-str-concat should generate a warning
+# on implicit string concatenation in sequences defined over several lines.
+check-str-concat-over-line-jumps=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of symbolic message names to ignore for Mixin members.
+ignored-checks-for-mixins=no-member,
+ not-async-context-manager,
+ not-context-manager,
+ attribute-defined-outside-init
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins.
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..fb688ed
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2023, MAST Archive Developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
index e69de29..462443d 100644
--- a/README.md
+++ b/README.md
@@ -0,0 +1,77 @@
+# vo-models
+
+`vo-models` is an open-source project that provides Python models and OpenAPI specifications for [IVOA](https://www.ivoa.net/) service protocols.
+
+The project is designed to be used by IVOA members, service implementors, and developers to facilitate the development of IVOA-compliant services and clients.
+
+## Features
+
+- **Pydantic-xml Models:** The project includes Python models for IVOA protocols, using [pydantic-xml](https://github.com/dapper91/pydantic-xml). Based on [Pydantic](https://docs.pydantic.dev/latest/), these models describe transactions for an IVOA protocol, such as UWS, and feature automatic validation, parsing and serialization of XML data for use with Python clients and web frameworks.
+
+- **OpenAPI Specifications:** The project includes OpenAPI definitions for IVOA protocols. The use of OpenAPI provides a standardized and machine-readable way to describe the IVOA protocols. OpenAPI specifications offer benefits such as automatic documentation generation, and automatic client and server code generation.
+
+- **Expandability:** The project is designed with future expansion in mind. Plans include extending the schema and models to cover other IVOA standards.
+
+## Protocols
+
+The following IVOA protocols are currently supported / under development:
+
+- **UWS (Universal Worker Service) version 1.1:**
+ - Active development:
+ - OpenAPI Models
+ - Pydantic-XML Models
+ - Planned:
+ - OpenAPI Service Definition
+
+## Installation
+
+The latest version of the project can be installed from PyPI:
+
+```bash
+pip install vo-models
+```
+
+### Conda
+
+To install the project using Conda, you can use the provided environment file:
+
+```bash
+git clone https://github.com/spacetelescope/vo-models.git
+cd vo-models
+conda env create -f environment.yml
+conda activate vo-models
+pip install -r requirements.txt
+pip install .
+```
+
+For active development, install the project in development mode:
+
+```bash
+pip install -e .[dev,test]
+```
+
+## Usage
+### OpenAPI Schema
+
+OpenAPI schema files representing IVOA protocol transactions can be found in the `vo/models/openapi` directory.
+
+For each protocol, two files are provided: a `components.yml` file containing the JSON/XML schema definitions for request/response transactions, and a file named after the protocol (e.g. `uws.yml`) containing its OpenAPI specification. Both the schema definitions and the OpenAPI specification can be viewed in the [Swagger Editor](https://editor.swagger.io/).
+
+*Note: Currently, the OpenAPI definition files are not guaranteed to be complete. They are provided as a starting point for future development and as an example of how the schema definitions can be used.*
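+
+For a quick look at what a `components.yml` file defines, it can be loaded with any YAML parser. A minimal sketch, assuming PyYAML is available; the path below is illustrative and should be adjusted to the actual repository layout:
+
+```python
+import yaml  # PyYAML, assumed to be installed separately
+
+# Illustrative location of the UWS component schemas; adjust to the real path in this repository.
+with open("vo/models/openapi/uws/components.yml", "r", encoding="utf-8") as schema_file:
+    components = yaml.safe_load(schema_file)
+
+# OpenAPI component schemas conventionally live under "components" -> "schemas".
+for schema_name in components.get("components", {}).get("schemas", {}):
+    print(schema_name)
+```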
+
+### Pydantic-XML Models
+
+Python models using [pydantic-xml](https://github.com/dapper91/pydantic-xml), a library based on [Pydantic](https://docs.pydantic.dev/latest/), are provided in the `vo/models/xml` directory.
+
+These models can be used to parse and validate XML data into Python objects, as well as to serialize Python objects into XML data. They work with any Python web framework, but are particularly useful with libraries that leverage the power of Pydantic, such as [FastAPI](https://fastapi.tiangolo.com/).
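+
+As a rough sketch of that round trip, using the UWS `Parameter` model exercised by the tests in this change (the XML fragment and parameter name are illustrative only):
+
+```python
+from vo_models.xml.uws import Parameter
+
+# Parse an XML fragment into a validated Python object.
+param = Parameter.from_xml(
+    '<uws:parameter xmlns:uws="http://www.ivoa.net/xml/UWS/v1.0" id="maxrec">100</uws:parameter>'
+)
+print(param.id, param.value)  # maxrec 100
+
+# Serialize a Python object back to an XML string.
+xml_string = Parameter(id="maxrec", value="100").to_xml(encoding=str)
+```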
+
+### Contributing
+
+Contributions to the project are more than welcome. Collaboration and discussion with other IVOA members, service implementors, and developers is what started this project, and is what makes the IVOA great.
+
+If you are interested in contributing, whether that be adding a new protocol, improving the schema, fixing a bug or even a typo, please feel free to open an issue or pull request.
+
+
+### License
+
+This project is licensed under the [MIT License](LICENSE).
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index 5b1ac8f..34f0672 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,11 +1,10 @@
name: "vo-models"
channels:
- - file:///grp/dmd/conda/
+ - defaults
- conda-forge
dependencies:
- python>=3.10,<3.11
- pip
- wheel
- pip:
- - -r requirements.txt
- pip-tools
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..f7eb162
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,43 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "vo-models"
+version = "0.1.0"
+authors = [
+ {name = "Joshua Fraustro", email="jfraustro@stsci.edu"},
+ {name = "MAST Archive Developers", email="archive@stsci.edu"}
+]
+description = "Data models for IVOA protocols"
+readme = "README.md"
+requires-python = ">=3.10"
+
+dependencies = [
+ "pydantic-xml[lxml]>=2.6.0",
+ ]
+
+classifiers = [
+
+ "Topic :: Scientific/Engineering :: Astronomy",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+
+ "License :: OSI Approved :: MIT License",
+]
+keywords = [
+ "ivoa",
+ "uws",
+ "pydantic",
+ "pydantic-xml",
+ "openapi",
+ ]
+
+[project.optional-dependencies]
+test = ["pytest", "pytest-cov"]
+dev = ["pylint"]
+
+[project.urls]
+Homepage = "https://github.com/spacetelescope/vo-models"
+Issues = "https://github.com/spacetelescope/vo-models/issues"
+
+
diff --git a/requirements.txt b/requirements.txt
index a4ecc12..c0fd4cf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,6 +8,100 @@ annotated-types==0.6.0 \
--hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \
--hash=sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d
# via pydantic
+lxml==4.9.3 \
+ --hash=sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3 \
+ --hash=sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d \
+ --hash=sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a \
+ --hash=sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120 \
+ --hash=sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305 \
+ --hash=sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287 \
+ --hash=sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23 \
+ --hash=sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52 \
+ --hash=sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f \
+ --hash=sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4 \
+ --hash=sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584 \
+ --hash=sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f \
+ --hash=sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693 \
+ --hash=sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef \
+ --hash=sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5 \
+ --hash=sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02 \
+ --hash=sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc \
+ --hash=sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7 \
+ --hash=sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da \
+ --hash=sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a \
+ --hash=sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40 \
+ --hash=sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8 \
+ --hash=sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd \
+ --hash=sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601 \
+ --hash=sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c \
+ --hash=sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be \
+ --hash=sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2 \
+ --hash=sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c \
+ --hash=sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129 \
+ --hash=sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc \
+ --hash=sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2 \
+ --hash=sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1 \
+ --hash=sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7 \
+ --hash=sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d \
+ --hash=sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477 \
+ --hash=sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d \
+ --hash=sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e \
+ --hash=sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7 \
+ --hash=sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2 \
+ --hash=sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574 \
+ --hash=sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf \
+ --hash=sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b \
+ --hash=sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98 \
+ --hash=sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12 \
+ --hash=sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42 \
+ --hash=sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35 \
+ --hash=sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d \
+ --hash=sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce \
+ --hash=sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d \
+ --hash=sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f \
+ --hash=sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db \
+ --hash=sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4 \
+ --hash=sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694 \
+ --hash=sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac \
+ --hash=sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2 \
+ --hash=sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7 \
+ --hash=sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96 \
+ --hash=sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d \
+ --hash=sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b \
+ --hash=sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a \
+ --hash=sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13 \
+ --hash=sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340 \
+ --hash=sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6 \
+ --hash=sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458 \
+ --hash=sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c \
+ --hash=sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c \
+ --hash=sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9 \
+ --hash=sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432 \
+ --hash=sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991 \
+ --hash=sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69 \
+ --hash=sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf \
+ --hash=sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb \
+ --hash=sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b \
+ --hash=sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833 \
+ --hash=sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76 \
+ --hash=sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85 \
+ --hash=sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e \
+ --hash=sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50 \
+ --hash=sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8 \
+ --hash=sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4 \
+ --hash=sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b \
+ --hash=sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5 \
+ --hash=sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190 \
+ --hash=sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7 \
+ --hash=sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa \
+ --hash=sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0 \
+ --hash=sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9 \
+ --hash=sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0 \
+ --hash=sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b \
+ --hash=sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5 \
+ --hash=sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7 \
+ --hash=sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4
+ # via pydantic-xml
pydantic==2.5.2 \
--hash=sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0 \
--hash=sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd
@@ -119,10 +213,12 @@ pydantic-core==2.14.5 \
--hash=sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18 \
--hash=sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867
# via pydantic
-pydantic-xml==2.4.0 \
- --hash=sha256:77cb2b51401971aeaf98f5ac4a1a090eb0197ab84a719281eefcdb7733310e78 \
- --hash=sha256:fd914627c39ef994f5055fe3f8a2099772dd3c83f0ef3045568a4b0b9bed28e5
- # via vo-models (setup.py)
+pydantic-xml[lxml]==2.6.0 \
+ --hash=sha256:2a7019dcfcf68b0136378e72efbe0747ec21f7ba16bfcaf6ab5a47dd1f2d68e9 \
+ --hash=sha256:8a88d0e8af20406eca06af20e57b1d2ccb7d1630c00f927e1f839629c0decb1e
+ # via
+ # pydantic-xml
+ # vo-models (pyproject.toml)
typing-extensions==4.8.0 \
--hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \
--hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
diff --git a/setup.py b/setup.py
deleted file mode 100755
index 140e890..0000000
--- a/setup.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-try:
- from setuptools import setup
-except ImportError:
- from distutils.core import setup
-
-
-with open('README.md') as readme_file:
- readme = readme_file.read()
-
-
-requirements = [
- "pydantic-xml"
- ]
-
-
-setup(
- name='vo-models',
- version='0.1.0',
- description="Open-source data models for IVOA specifications",
- author="Joshua Fraustro",
- author_email='jfraustro@stsci.edu',
- url='https://github.com/jwfraustro/vo-models',
- packages=[
- 'vo-models',
- ],
- package_dir={'vo-models':
- 'vo-models'},
- include_package_data=True,
- install_requires=requirements,
- keywords='vo-models',
-)
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/uws/UWS-Schema-V1.0.xsd b/tests/uws/UWS-Schema-V1.0.xsd
new file mode 100644
index 0000000..0a6164e
--- /dev/null
+++ b/tests/uws/UWS-Schema-V1.0.xsd
@@ -0,0 +1,440 @@
+[UWS-Schema-V1.0.xsd: the standard IVOA UWS v1.0 XML schema, vendored so the tests can validate
+serialized output against it. It defines the ExecutionPhase enumeration (PENDING, QUEUED, EXECUTING,
+COMPLETED, ERROR, UNKNOWN, HELD, SUSPENDED, ABORTED, ARCHIVED) and the JobSummary, ShortJobDescription
+(jobref), Parameters, Results/ResultReference and ErrorSummary types. Schema text not reproduced here;
+current versions are available from https://www.ivoa.net/xml/ under "UWS - Universal Worker Service".]
diff --git a/tests/uws/__init__.py b/tests/uws/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/uws/uws_models_test.py b/tests/uws/uws_models_test.py
new file mode 100644
index 0000000..4122717
--- /dev/null
+++ b/tests/uws/uws_models_test.py
@@ -0,0 +1,457 @@
+"""Tests for the XML serialization of UWS elements"""
+
+from datetime import timezone as tz
+from unittest import TestCase
+from xml.etree.ElementTree import canonicalize
+
+from lxml import etree
+
+from vo_models.xml.uws import (
+ ErrorSummary,
+ Jobs,
+ JobSummary,
+ Parameter,
+ Parameters,
+ ResultReference,
+ Results,
+ ShortJobDescription,
+)
+from vo_models.xml.uws.types import ExecutionPhase
+from vo_models.xml.voresource.types import UTCTimestamp
+
+UWS_NAMESPACE_HEADER = """xmlns:uws="http://www.ivoa.net/xml/UWS/v1.0"
+xmlns:xlink="http://www.w3.org/1999/xlink"
+xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+"""
+
+# New schema versions can be downloaded from https://www.ivoa.net/xml/ under "UWS - Universal Worker Service"
+# The most current version is 1.1, found here: https://www.ivoa.net/xml/UWS/UWS-v1.1.xsd
+with open("tests/uws/UWS-Schema-V1.0.xsd", "r") as schema_file:
+ uws_schema = etree.XMLSchema(etree.parse(schema_file))
+
+
+class TestErrorSummaryType(TestCase):
+ """Tests for the UWS errorSummary complex type"""
+
+ test_error_summary_xml = (
+        f'<uws:errorSummary {UWS_NAMESPACE_HEADER} type="transient" hasDetail="true">'
+        "<uws:message>Invalid query.</uws:message>"
+        "</uws:errorSummary>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ error_summary = ErrorSummary.from_xml(self.test_error_summary_xml)
+ self.assertEqual(error_summary.type, "transient")
+ self.assertEqual(error_summary.has_detail, True)
+ self.assertEqual(error_summary.message, "Invalid query.")
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ error_summary = ErrorSummary(type="transient", has_detail=True, message="Invalid query.")
+ error_summary_xml = error_summary.to_xml(encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.test_error_summary_xml, strip_text=True),
+ canonicalize(error_summary_xml, strip_text=True),
+ )
+
+class TestParameterType(TestCase):
+ """Tests for the UWS Parameter complex type"""
+
+ test_parameter_xml = (
+        f'<uws:parameter {UWS_NAMESPACE_HEADER} byReference="false" id="param1" isPost="false">'
+        "test_value"
+        "</uws:parameter>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ parameter = Parameter.from_xml(self.test_parameter_xml)
+ self.assertEqual(parameter.by_reference, False)
+ self.assertEqual(parameter.id, "param1")
+ self.assertEqual(parameter.is_post, False)
+ self.assertEqual(parameter.value, "test_value")
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ parameter = Parameter(by_reference=False, id="param1", is_post=False, value="test_value")
+ parameter_xml = parameter.to_xml(encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.test_parameter_xml, strip_text=True),
+ canonicalize(parameter_xml, strip_text=True),
+ )
+
+
+class TestResultReferenceType(TestCase):
+ """Test the UWS ResultReference complex type"""
+
+ test_result_reference_xml = (
+        f'<uws:result {UWS_NAMESPACE_HEADER} id="result1" size="1234" '
+        'mime-type="text/xml" xlink:href="http://testlink.com/" xlink:type="simple"/>'
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ result_reference = ResultReference.from_xml(self.test_result_reference_xml)
+ self.assertEqual(result_reference.id, "result1")
+ self.assertEqual(result_reference.mime_type, "text/xml")
+ self.assertEqual(result_reference.href, "http://testlink.com/")
+ self.assertEqual(result_reference.type, "simple")
+ self.assertEqual(result_reference.size, 1234)
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ result_reference = ResultReference(
+ id="result1",
+ mime_type="text/xml",
+ href="http://testlink.com/",
+ type="simple",
+ size=1234,
+ )
+ result_reference_xml = result_reference.to_xml(encoding=str)
+ self.assertEqual(
+ canonicalize(self.test_result_reference_xml, strip_text=True),
+ canonicalize(result_reference_xml, strip_text=True),
+ )
+
+
+class TestResultsElement(TestCase):
+ """Test the results list element"""
+
+ test_results_xml = (
+        f"<uws:results {UWS_NAMESPACE_HEADER}>"
+        '<uws:result id="result1" mime-type="text/xml" xlink:href="http://testlink.com/" xlink:type="simple"/>'
+        '<uws:result id="result2" mime-type="text/xml" xlink:href="http://testlink.com/" xlink:type="simple"/>'
+        "</uws:results>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ results = Results.from_xml(self.test_results_xml)
+ self.assertEqual(len(results.results), 2)
+ self.assertEqual(results.results[0].id, "result1")
+ self.assertEqual(results.results[1].id, "result2")
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ results_list = Results(
+ results=[
+ ResultReference(
+ id="result1",
+ mime_type="text/xml",
+ href="http://testlink.com/",
+ ),
+ ResultReference(
+ id="result2",
+ mime_type="text/xml",
+ href="http://testlink.com/",
+ ),
+ ]
+ )
+ results_xml = results_list.to_xml(encoding=str, skip_empty=True)
+
+ self.assertEqual(
+ canonicalize(self.test_results_xml, strip_text=True),
+ canonicalize(results_xml, strip_text=True),
+ )
+
+ def test_validate(self):
+ """Test validation against XML schema"""
+
+ results = Results(
+ results=[
+ ResultReference(
+ id="result1",
+ mime_type="text/xml",
+ href="http://testlink.com/",
+ ),
+ ResultReference(
+ id="result2",
+ mime_type="text/xml",
+ href="http://testlink.com/",
+ ),
+ ]
+ )
+ results_xml = etree.fromstring(results.to_xml(encoding=str, skip_empty=True))
+ uws_schema.assertValid(results_xml)
+
+
+class TestShortJobDescriptionType(TestCase):
+ """Test the UWS ShortJobDescription complex type"""
+
+ test_short_job_description_xml = (
+        f'<uws:jobref {UWS_NAMESPACE_HEADER} id="id1" xlink:type="simple" xlink:href="http://uri1">'
+        "<uws:phase>PENDING</uws:phase>"
+        "<uws:runId>runId1</uws:runId>"
+        '<uws:ownerId xsi:nil="true"/>'
+        "<uws:creationTime>1900-01-01T01:01:01.000Z</uws:creationTime>"
+        "</uws:jobref>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ short_job_description = ShortJobDescription.from_xml(self.test_short_job_description_xml)
+ self.assertEqual(short_job_description.job_id, "id1")
+ self.assertEqual(short_job_description.type, "simple")
+ self.assertEqual(short_job_description.href, "http://uri1")
+ self.assertEqual(short_job_description.phase, "PENDING")
+ self.assertEqual(short_job_description.run_id, "runId1")
+ self.assertEqual(short_job_description.owner_id, None)
+ self.assertEqual(short_job_description.creation_time, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ short_job_description = ShortJobDescription(
+ job_id="id1",
+ type="simple",
+ href="http://uri1",
+ phase="PENDING",
+ run_id="runId1",
+ creation_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ )
+ short_job_description_xml = short_job_description.to_xml(encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.test_short_job_description_xml, strip_text=True),
+ canonicalize(short_job_description_xml, strip_text=True),
+ )
+
+
+class TestParametersElement(TestCase):
+ """Test the UWS Parameters element"""
+
+ test_parameters_xml = (
+        f"<uws:parameters {UWS_NAMESPACE_HEADER}>"
+        '<uws:parameter byReference="false" id="param1" isPost="false">value1</uws:parameter>'
+        '<uws:parameter byReference="false" id="param2" isPost="false">value2</uws:parameter>'
+        '<uws:parameter byReference="false" id="param3" isPost="false">value3</uws:parameter>'
+        "</uws:parameters>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ parameters = Parameters.from_xml(self.test_parameters_xml)
+ self.assertEqual(len(parameters.parameter), 3)
+
+ self.assertEqual(parameters.parameter[0].id, "param1")
+ self.assertEqual(parameters.parameter[1].id, "param2")
+ self.assertEqual(parameters.parameter[2].id, "param3")
+
+ self.assertEqual(parameters.parameter[0].value, "value1")
+ self.assertEqual(parameters.parameter[1].value, "value2")
+ self.assertEqual(parameters.parameter[2].value, "value3")
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ parameters_element = Parameters(
+ parameter=[
+ Parameter(id="param1", value="value1"),
+ Parameter(id="param2", value="value2"),
+ Parameter(id="param3", value="value3"),
+ ]
+ )
+ parameters_xml = parameters_element.to_xml(skip_empty=True, encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.test_parameters_xml, strip_text=True),
+ canonicalize(parameters_xml, strip_text=True),
+ )
+
+ def test_validate(self):
+ """Test validation against XML schema"""
+
+ parameters = Parameters(
+ parameter=[
+ Parameter(id="param1", value="value1"),
+ Parameter(id="param2", value="value2"),
+ Parameter(id="param3", value="value3"),
+ ]
+ )
+ parameters_xml = etree.fromstring(parameters.to_xml(skip_empty=True, encoding=str))
+ uws_schema.assertValid(parameters_xml)
+
+
+class TestJobSummaryElement(TestCase):
+ """Test the UWS JobSummary element"""
+
+ job_summary_xml = (
+        f'<uws:job {UWS_NAMESPACE_HEADER}>'
+        "<uws:jobId>jobId1</uws:jobId>"
+        "<uws:runId>runId1</uws:runId>"
+        "<uws:ownerId>ownerId1</uws:ownerId>"
+        "<uws:phase>PENDING</uws:phase>"
+        '<uws:quote xsi:nil="true"/>'
+        "<uws:creationTime>1900-01-01T01:01:01.000Z</uws:creationTime>"
+        "<uws:startTime>1900-01-01T01:01:01.000Z</uws:startTime>"
+        "<uws:endTime>1900-01-01T01:01:01.000Z</uws:endTime>"
+        "<uws:executionDuration>0</uws:executionDuration>"
+        "<uws:destruction>1900-01-01T01:01:01.000Z</uws:destruction>"
+        "<uws:parameters>"
+        '<uws:parameter byReference="false" id="param1" isPost="false">value1</uws:parameter>'
+        '<uws:parameter byReference="false" id="param2" isPost="false">value2</uws:parameter>'
+        "</uws:parameters>"
+        "<uws:results/>"
+        "<uws:jobInfo>jobInfo1</uws:jobInfo>"
+        "</uws:job>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ job_summary = JobSummary[Parameters].from_xml(self.job_summary_xml)
+ self.assertEqual(job_summary.job_id, "jobId1")
+ self.assertEqual(job_summary.run_id, "runId1")
+ self.assertEqual(job_summary.owner_id, "ownerId1")
+ self.assertEqual(job_summary.phase, ExecutionPhase.PENDING.value)
+ self.assertEqual(job_summary.quote, None)
+ self.assertEqual(job_summary.creation_time, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+ self.assertEqual(job_summary.start_time, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+ self.assertEqual(job_summary.end_time, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+ self.assertEqual(job_summary.execution_duration, 0)
+ self.assertEqual(job_summary.destruction, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+ self.assertEqual(len(job_summary.parameters.parameter), 2)
+ self.assertEqual(job_summary.parameters.parameter[0].id, "param1")
+ self.assertEqual(job_summary.parameters.parameter[1].id, "param2")
+ self.assertEqual(job_summary.parameters.parameter[0].value, "value1")
+ self.assertEqual(job_summary.parameters.parameter[1].value, "value2")
+ self.assertEqual(len(job_summary.results.results), 0)
+ self.assertEqual(job_summary.error_summary, None)
+ self.assertEqual(job_summary.job_info[0], "jobInfo1")
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ job_summary = JobSummary[Parameters](
+ job_id="jobId1",
+ run_id="runId1",
+ owner_id="ownerId1",
+ phase=ExecutionPhase.PENDING,
+ quote=None,
+ creation_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ start_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ end_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ execution_duration=0,
+ destruction=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ parameters=Parameters(
+ parameter=[
+ Parameter(id="param1", value="value1"),
+ Parameter(id="param2", value="value2"),
+ ]
+ ),
+ results=Results(),
+ job_info=["jobInfo1"],
+ )
+ job_summary_xml = job_summary.to_xml(encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.job_summary_xml, strip_text=True),
+ canonicalize(job_summary_xml, strip_text=True),
+ )
+
+ def test_validate(self):
+ """Validate against the schema"""
+
+ job_summary = JobSummary[Parameters](
+ job_id="jobId1",
+ run_id="runId1",
+ owner_id="ownerId1",
+ phase=ExecutionPhase.PENDING,
+ quote=None,
+ creation_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ start_time=None,
+ end_time=None,
+ execution_duration=0,
+ destruction=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ parameters=Parameters(
+ parameter=[
+ Parameter(id="param1", value="value1"),
+ Parameter(id="param2", value="value2"),
+ ]
+ ),
+ results=Results(results=[ResultReference(id="result1")]),
+ error_summary=None,
+ )
+ job_summary_xml = etree.fromstring(job_summary.to_xml(encoding=str))
+ uws_schema.assertValid(job_summary_xml)
+
+
+class TestJobsElement(TestCase):
+ """Test the UWS Jobs element"""
+
+ test_job_list_xml = (
+ f'<uws:jobs {UWS_NAMESPACE_HEADER} version="1.1">'
+ '<uws:jobref id="id1" xlink:type="simple" xlink:href="http://uri1">'
+ "<uws:phase>PENDING</uws:phase>"
+ "<uws:runId/>"
+ '<uws:ownerId xsi:nil="true"/>'
+ "<uws:creationTime>1900-01-01T01:01:01.000Z</uws:creationTime>"
+ "</uws:jobref>"
+ "</uws:jobs>"
+ )
+
+ def test_read_from_xml(self):
+ """Test reading from XML"""
+
+ jobs_element = Jobs.from_xml(self.test_job_list_xml)
+ self.assertEqual(len(jobs_element.jobref), 1)
+ self.assertEqual(jobs_element.jobref[0].job_id, "id1")
+ self.assertEqual(jobs_element.jobref[0].phase, ExecutionPhase.PENDING)
+ self.assertEqual(jobs_element.jobref[0].run_id, None)
+ self.assertEqual(jobs_element.jobref[0].owner_id, None)
+ self.assertEqual(jobs_element.jobref[0].creation_time, UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc))
+
+ def test_write_to_xml(self):
+ """Test writing to XML"""
+
+ jobs_element = Jobs(
+ jobref=[
+ ShortJobDescription(
+ job_id="id1",
+ owner_id=None,
+ href="http://uri1",
+ phase=ExecutionPhase.PENDING,
+ creation_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ )
+ ]
+ )
+ jobs_element_xml = jobs_element.to_xml(encoding=str)
+
+ self.assertEqual(
+ canonicalize(self.test_job_list_xml, strip_text=True),
+ canonicalize(jobs_element_xml, strip_text=True),
+ )
+
+ def test_validate(self):
+ """Validate against the schema"""
+
+ jobs_element = Jobs(
+ jobref=[
+ ShortJobDescription(
+ job_id="id1",
+ phase=ExecutionPhase.PENDING,
+ creation_time=UTCTimestamp(1900, 1, 1, 1, 1, 1, tzinfo=tz.utc),
+ )
+ ]
+ )
+ jobs_element_xml = etree.fromstring(jobs_element.to_xml(skip_empty=True, encoding=str))
+ uws_schema.assertValid(jobs_element_xml)
diff --git a/tests/voresource/__init__.py b/tests/voresource/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/voresource/voresource_types_test.py b/tests/voresource/voresource_types_test.py
new file mode 100644
index 0000000..350dbf3
--- /dev/null
+++ b/tests/voresource/voresource_types_test.py
@@ -0,0 +1,44 @@
+"""Tests for VOResource simple types"""
+from unittest import TestCase
+
+from vo_models.xml.voresource.types import UTCTimestamp
+
+
+class TestVODatetimeModel(TestCase):
+ """Test VODatetime parsing"""
+
+ def test_vodatetime_parse(self):
+ """Test that datetimes are parsed and output in correct format"""
+
+ # Test allowed string formats
+ good_vo_dt = "2023-03-15T18:27:18.758Z"
+
+ # 2023-03-15T18:27:18.758 (No timezone - Z UTC assumed)
+ vo_dt = UTCTimestamp.fromisoformat("2023-03-15T18:27:18.758")
+ self.assertIsInstance(vo_dt, UTCTimestamp)
+ self.assertEqual(vo_dt.isoformat(), good_vo_dt)
+
+ # 2023-03-15T18:27:18.758Z (Zulu UTC - T separator)
+ vo_dt = UTCTimestamp.fromisoformat("2023-03-15T18:27:18.758Z")
+ self.assertIsInstance(vo_dt, UTCTimestamp)
+ self.assertEqual(vo_dt.isoformat(), good_vo_dt)
+
+ # 2023-03-15 18:27:18.758Z (Zulu UTC - space separator)
+ vo_dt = UTCTimestamp.fromisoformat("2023-03-15 18:27:18.758Z")
+ self.assertIsInstance(vo_dt, UTCTimestamp)
+ self.assertEqual(vo_dt.isoformat(), good_vo_dt)
+
+ # 2023-03-15T18:27:18.758+00:00 (UTC w/ offset - T separator)
+ vo_dt = UTCTimestamp.fromisoformat("2023-03-15T18:27:18.758+00:00")
+ self.assertIsInstance(vo_dt, UTCTimestamp)
+ self.assertEqual(vo_dt.isoformat(), good_vo_dt)
+
+ # 2023-03-15 18:27:18.758+00:00 (UTC w/ offset - space separator)
+ vo_dt = UTCTimestamp.fromisoformat("2023-03-15 18:27:18.758+00:00")
+ self.assertIsInstance(vo_dt, UTCTimestamp)
+ self.assertEqual(vo_dt.isoformat(), good_vo_dt)
+
+ # Test that malformed / non-VO-compliant datetime strings are rejected
+ with self.assertRaises(ValueError):
+ # pylint: disable=protected-access
+ UTCTimestamp._validate("20230315T18:27:18.758")
diff --git a/vo_models/__init__.py b/vo_models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/vo_models/openapi/uws/components.yml b/vo_models/openapi/uws/components.yml
new file mode 100644
index 0000000..45f3b2e
--- /dev/null
+++ b/vo_models/openapi/uws/components.yml
@@ -0,0 +1,367 @@
+# UWS Version 1.1 - OpenAPI 3.0 Schema components
+# These components can be used to generate a UWS 1.1 compliant OpenAPI service description
+# They have been created to include both XML and JSON representations of UWS resources / service responses
+components:
+ schemas:
+ ShortJobDescription:
+ type: object
+ title: shortJobDescription
+ required:
+ - id
+ properties:
+ phase:
+ $ref: '#/components/schemas/ExecutionPhase'
+ runId:
+ type: string
+ maxItems: 1
+ description: |
+ This is a client supplied identifier - the UWS system
+ does nothing other than to return it as part of the
+ description of the job
+ example: 'JWST-1234'
+ ownerId:
+ type: string
+ nullable: true
+ description: |
+ The owner (creator) of the job - this should be
+ expressed as a string that can be parsed in accordance
+ with IVOA security standards. If there was no
+ authenticated job creator then this should be set to
+ NULL.
+ example: 'Noirlab/John.Smith'
+ creationTime:
+ type: string
+ format: date-time
+ nullable: false
+ description: |
+ The instant at which the job was created.
+ id:
+ type: string
+ description: |
+ The identifier for the job
+ example: 'HSC_XYZ_123'
+ xml:
+ attribute: true
+ type:
+ type: string
+ description: |
+ xlink type
+ default: simple
+ xml:
+ prefix: 'xlink'
+ namespace: 'http://www.w3.org/1999/xlink'
+ attribute: true
+ href:
+ type: string
+ description: |
+ xlink href
+ example: '.../jobs/HSC_XYZ_123'
+ xml:
+ prefix: 'xlink'
+ namespace: 'http://www.w3.org/1999/xlink'
+ attribute: true
+ ExecutionPhase:
+ type: string
+ title: executionPhase
+ description: |
+ Enumeration of possible phases of job execution
+
+ PENDING: The first phase a job is entered into - this is where
+ a job is being set up but no request to run has
+ occurred.
+
+ QUEUED: A job has been accepted for execution but is waiting
+ in a queue
+
+ EXECUTING: A job is running
+
+ COMPLETED: A job has completed successfully
+
+ ERROR: Some form of error has occurred
+
+ UNKNOWN: The job is in an unknown state.
+
+ HELD: The job is HELD pending execution and will not
+ automatically be executed - can occur after a
+ PHASE=RUN request has been made (cf PENDING).
+
+ SUSPENDED: The job has been suspended by the system during
+ execution
+
+ ABORTED: The job has been aborted, either by user request or by
+ the server because of lack or overuse of resources.
+
+ ARCHIVED: The job has been archived by the server at destruction time. An archived job
+ may have deleted the results to reclaim resources, but must have job metadata preserved.
+ This is an alternative that the server may choose in contrast to completely destroying all record of the job.
+ enum:
+ - PENDING
+ - QUEUED
+ - EXECUTING
+ - COMPLETED
+ - ERROR
+ - UNKNOWN
+ - HELD
+ - SUSPENDED
+ - ABORTED
+ - ARCHIVED
+ JobSummary:
+ type: object
+ description: |
+ The complete representation of the state of a job
+ title: jobSummary
+ required:
+ - jobId
+ properties:
+ jobId:
+ type: string
+ description: |
+ The identifier for the job
+ example: 'HSC_XYZ_123'
+ runId:
+ type: string
+ maxItems: 1
+ description: |
+ this is a client supplied identifier - the UWS system
+ does nothing other than to return it as part of the
+ description of the job
+ example: 'JWST-1234'
+ ownerId:
+ type: string
+ nullable: true
+ description: |
+ The owner (creator) of the job - this should be
+ expressed as a string that can be parsed in accordance
+ with IVOA security standards. If there was no
+ authenticated job creator then this should be set to
+ NULL.
+ example: 'Noirlab/John.Smith'
+ phase:
+ $ref: '#/components/schemas/ExecutionPhase'
+ quote:
+ type: string
+ format: date-time
+ maxItems: 1
+ nullable: true
+ description: |
+ A Quote predicts when the job is likely to complete -
+ returned at /{jobs}/{job-id}/quote "don't know" is
+ encoded by setting to the XML null value
+ xsi:nil="true"
+ creationTime:
+ type: string
+ format: date-time
+ maxItems: 1
+ description: |
+ The instant at which the job was created.
+
+ Note that the version 1.1 of the specification requires that this element be present.
+ It is optional only in versions 1.x of the schema for backwards compatibility.
+ 2.0+ versions of the schema will make this formally mandatory in an XML sense.
+ startTime:
+ type: string
+ format: date-time
+ nullable: true
+ description: |
+ The instant at which the job started execution.
+ endTime:
+ type: string
+ format: date-time
+ nullable: true
+ description: |
+ The instant at which the job finished execution.
+ executionDuration:
+ type: integer
+ nullable: false
+ description: |
+ The duration (in seconds) for which the job should be
+ allowed to run - a value of 0 is intended to mean
+ unlimited - returned at
+ /{jobs}/{job-id}/executionduration
+ destruction:
+ type: string
+ format: date-time
+ nullable: true
+ description: |
+ The time at which the whole job + records + results
+ will be destroyed. Returned at /{jobs}/{job-id}/destruction
+ parameters:
+ type: object
+ maxItems: 1
+ description: |
+ The parameters to the job (where appropriate) can also
+ be retrieved at /{jobs}/{job-id}/parameters
+ $ref: '#/components/schemas/Parameters'
+ results:
+ type: object
+ description: |
+ The results for the job - can also be retrieved at /{jobs}/{job-id}/results
+ $ref: '#/components/schemas/Results'
+ errorSummary:
+ type: object
+ maxItems: 1
+ $ref: '#/components/schemas/ErrorSummary'
+ jobInfo:
+ type: string
+ maxItems: 1
+ description: |
+ This is arbitrary information that can be added to the
+ job description by the UWS implementation.
+ version:
+ $ref: '#/components/schemas/UWSVersion'
+ description: |
+ note that this attribute is actually required by the 1.1 specification - however remains optional in the schema
+ for backwards compatibility. It will be formally required in the next major revision.
+ UWSVersion:
+ type: string
+ title: UWSVersion
+ description: |
+ The version of the UWS standard that the server complies with.
+ enum:
+ - 1.0
+ - 1.1
+ xml:
+ prefix: 'uws'
+ attribute: true
+ Job:
+ type: object
+ $ref: '#/components/schemas/JobSummary'
+ title: job
+ description: |
+ This is the information that is returned
+ when a GET is made for a single job resource - i.e.
+ /{jobs}/{job-id}
+ xml:
+ name: job
+ Jobs:
+ type: object
+ title: jobs
+ description: |
+ The list of job references returned at /(jobs)
+
+ The list presented may be affected by the current security context and may be filtered
+ properties:
+ jobref:
+ type: array
+ items:
+ $ref: '#/components/schemas/ShortJobDescription'
+ version:
+ $ref: '#/components/schemas/UWSVersion'
+ xml:
+ name: jobs
+ ResultReference:
+ type: object
+ title: resultReference
+ description: |
+ A reference to a UWS result
+ required:
+ - id
+ properties:
+ id:
+ type: string
+ xml:
+ attribute: true
+ reference:
+ type: string
+ description: |
+ The URL that can be used to retrieve the result
+ xml:
+ attribute: true
+ prefix: uws
+ size:
+ type: number
+ xml:
+ attribute: true
+ mime-type:
+ type: string
+ xml:
+ attribute: true
+ Results:
+ type: object
+ title: results
+ description: |
+ The element returned for /{jobs}/{job-id}/results
+ properties:
+ result:
+ type: array
+ items:
+ $ref: '#/components/schemas/ResultReference'
+ xml:
+ name: results
+ ErrorSummary:
+ type: object
+ title: errorSummary
+ description: |
+ A short summary of an error - a fuller representation of the
+ error may be retrieved from /{jobs}/{job-id}/error
+ required:
+ - type
+ - hasDetail
+ properties:
+ message:
+ type: string
+ description: |
+ A short message describing the error
+ example: 'Error Message'
+ hasDetail:
+ type: boolean
+ xml:
+ attribute: true
+ type:
+ type: string
+ description: |
+ characterization of the type of the error
+ enum:
+ - transient
+ - fatal
+ xml:
+ attribute: true
+ xml:
+ name: errorSummary
+ Parameter:
+ type: object
+ title: parameter
+ description: |
+ The list of input parameters to the job - if
+ the job description language does not naturally have
+ parameters, then this list should contain one element which
+ is the content of the original POST that created the job.
+ required:
+ - id
+ properties:
+ byReference:
+ type: boolean
+ default: false
+ description: |
+ If this attribute is true then the
+ content of the parameter represents a URL to retrieve the
+ actual parameter value.
+
+ It is up to the implementation to decide
+ if a parameter value cannot be returned directly as the
+ content - the basic rule is that the representation of
+ the parameter must allow the whole job element to be
+ valid XML. If this cannot be achieved then the parameter
+ value must be returned by reference.
+ xml:
+ attribute: true
+ id:
+ type: string
+ description: |
+ The identifier for the parameter
+ xml:
+ attribute: true
+ isPost:
+ type: boolean
+ xml:
+ attribute: true
+ xml:
+ name: parameter
+ Parameters:
+ type: object
+ title: parameters
+ properties:
+ parameter:
+ type: array
+ items:
+ $ref: '#/components/schemas/Parameter'
diff --git a/vo_models/openapi/uws/uws.yml b/vo_models/openapi/uws/uws.yml
new file mode 100644
index 0000000..37bba7d
--- /dev/null
+++ b/vo_models/openapi/uws/uws.yml
@@ -0,0 +1,297 @@
+# Universal Worker Service (UWS) API Pattern
+# OpenAPI 3.0.2
+# Example of a UWS API pattern based on the IVOA UWS 1.1 specification, using OpenAPI schema models.
+# Not guaranteed to be complete or correct.
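+# Informative sketch of the typical client flow against the paths below (per the UWS 1.1
+# async pattern; endpoint names are the ones defined in this file):
+#   1. POST /jobs with job parameters            -> 303 redirect to /jobs/{job-id} (job is PENDING)
+#   2. POST /jobs/{job-id}/phase with PHASE=RUN  -> job moves to QUEUED / EXECUTING
+#   3. Poll GET /jobs/{job-id}/phase (optionally with WAIT) until COMPLETED or ERROR
+#   4. GET /jobs/{job-id}/results                -> the uws:results element with result references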
+openapi: '3.0.2'
+info:
+ title: Universal Worker Service (UWS)
+ version: '1.1'
+servers:
+ - url: https://mast.stsci.edu/vo-tap/api/v0.1/hsc/async
+paths:
+ /jobs:
+ get:
+ description: 'Returns the job list'
+ parameters:
+ - name: PHASE
+ in: query
+ description: 'Execution phase of the job to filter for'
+ schema:
+ $ref: 'components.yml#/components/schemas/ExecutionPhase'
+ - name: AFTER
+ in: query
+ description: 'Return jobs submitted after this date'
+ schema:
+ type: string
+ format: date-time
+ - name: LAST
+ in: query
+ description: 'Return only the last N jobs'
+ schema:
+ type: integer
+ responses:
+ '200':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Jobs'
+ post:
+ description: 'Submits a job'
+ parameters:
+ - name: PHASE
+ in: query
+ description: 'Autorun the job if possible'
+ required: false
+ schema:
+ type: string
+ enum:
+ - RUN
+ requestBody:
+ description: 'Job parameters'
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Parameters'
+ responses:
+ '303':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/JobSummary'
+ /jobs/{job-id}:
+ get:
+ description: 'Returns the job description'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ - name: PHASE
+ in: query
+ description: 'Phase of the job to poll for'
+ schema:
+ type: string
+ enum:
+ - PENDING
+ - QUEUED
+ - EXECUTING
+ - name: WAIT
+ in: query
+ description: 'Maximum time to wait for the job to change phases.'
+ schema:
+ type: integer
+ responses:
+ '200':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Job'
+ post:
+ description: Update job parameters
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ requestBody:
+ description: 'Parameters to update'
+ required: true
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ properties:
+ PHASE:
+ type: string
+ enum:
+ - RUN
+ - ABORT
+ - SUSPEND
+ - ARCHIVE
+ DESTRUCTION:
+ type: string
+ format: date-time
+ ACTION:
+ type: string
+ enum:
+ - DELETE
+ responses:
+ '303':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+ format: date-time
+ application/xml:
+ schema:
+ oneOf:
+ - $ref: 'components.yml#/components/schemas/JobSummary'
+ - $ref: 'components.yml#/components/schemas/Jobs'
+ delete:
+ description: 'Deletes the job'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '303':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Jobs'
+ /jobs/{job-id}/phase:
+ get:
+ description: 'Returns the job phase'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ $ref: 'components.yml#/components/schemas/ExecutionPhase'
+ post:
+ description: 'Updates the job phase'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ requestBody:
+ description: 'Phase to update'
+ required: true
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ properties:
+ PHASE:
+ type: string
+ enum:
+ - RUN
+ - ABORT
+ - SUSPEND
+ - ARCHIVE
+ responses:
+ '303':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+ format: date-time
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/JobSummary'
+ /jobs/{job-id}/executionduration:
+ get:
+ description: 'Returns the job execution duration'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: integer
+ post:
+ description: 'Updates the job execution duration'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ requestBody:
+ description: 'Execution duration to update'
+ required: true
+ content:
+ application/x-www-form-urlencoded:
+ schema:
+ properties:
+ EXECUTIONDURATION:
+ type: integer
+ responses:
+ '303':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+ format: date-time
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/JobSummary'
+ /jobs/{job-id}/destruction:
+ get:
+ description: 'Returns the job destruction time'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+ format: date-time
+ /jobs/{job-id}/error:
+ get:
+ description: 'Returns the job error summary'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/ErrorSummary'
+ /jobs/{job-id}/quote:
+ get:
+ description: 'Returns the job quote'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+ format: date-time
+ /jobs/{job-id}/parameters:
+ get:
+ description: 'Returns the job parameters'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Parameters'
+ /jobs/{job-id}/results:
+ get:
+ description: 'Returns the job results'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ application/xml:
+ schema:
+ $ref: 'components.yml#/components/schemas/Results'
+ /jobs/{job-id}/owner:
+ get:
+ description: 'Returns the job owner'
+ parameters:
+ - $ref: '#/components/parameters/job-id'
+ responses:
+ '200':
+ description: Success
+ content:
+ text/plain:
+ schema:
+ type: string
+components:
+ parameters:
+ job-id:
+ name: job-id
+ in: path
+ description: 'Job ID'
+ required: true
+ schema:
+ type: string
\ No newline at end of file
diff --git a/vo_models/xml/__init__.py b/vo_models/xml/__init__.py
new file mode 100644
index 0000000..a08474b
--- /dev/null
+++ b/vo_models/xml/__init__.py
@@ -0,0 +1 @@
+"""IVOA resource models using pydantic-xml for XML representation."""
diff --git a/vo_models/xml/uws/__init__.py b/vo_models/xml/uws/__init__.py
new file mode 100644
index 0000000..61135ed
--- /dev/null
+++ b/vo_models/xml/uws/__init__.py
@@ -0,0 +1,18 @@
+"""
+Module containing VO Universal Worker Service (UWS) classes.
+
+Contains pydantic-xml models for UWS request / response serialization.
+IVOA UWS Spec: https://www.ivoa.net/documents/UWS/20161024/REC-UWS-1.1-20161024.html
+"""
+from vo_models.xml.uws.models import (
+ ErrorSummary,
+ Job,
+ Jobs,
+ JobSummary,
+ Parameter,
+ Parameters,
+ ParametersType,
+ ResultReference,
+ Results,
+ ShortJobDescription,
+)
diff --git a/vo_models/xml/uws/models.py b/vo_models/xml/uws/models.py
new file mode 100644
index 0000000..7656468
--- /dev/null
+++ b/vo_models/xml/uws/models.py
@@ -0,0 +1,221 @@
+"""UWS Job Schema using Pydantic-XML models"""
+from typing import Dict, Generic, Optional, TypeVar
+
+from pydantic import field_validator
+from pydantic_xml import BaseXmlModel, attr, element
+
+from vo_models.xml.voresource.types import UTCTimestamp
+from vo_models.xml.uws.types import ErrorType, ExecutionPhase, UWSVersion
+from vo_models.xml.xlink import XlinkType
+
+NSMAP = {
+ "uws": "http://www.ivoa.net/xml/UWS/v1.0",
+ "xlink": "http://www.w3.org/1999/xlink",
+ "xsd": "http://www.w3.org/2001/XMLSchema",
+ "xsi": "http://www.w3.org/2001/XMLSchema-instance",
+}
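+# Namespace map shared by the models below: prefixes in the serialized XML (e.g. uws:job,
+# xlink:href) resolve to these URIs. Note that the UWS XML namespace remains "UWS/v1.0"
+# even for UWS 1.1 services, per the spec.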
+
+
+class Parameter(BaseXmlModel, tag="parameter", ns="uws", nsmap=NSMAP):
+ """A UWS Job parameter
+
+ The list of input parameters to the job - if the job description language does not naturally have
+ parameters, then this list should contain one element which is the content of the original POST that created the
+ job.
+
+ Attributes:
+ byReference (bool): If this attribute is true then the content of the parameter represents a URL to retrieve the
+ actual parameter value.
+ It is up to the implementation to decide if a parameter value cannot be returned directly as
+ the content - the basic rule is that the representation of the parameter must allow the whole
+ job element to be valid XML. If this cannot be achieved then the parameter value must be
+ returned by reference.
+ id (str): The identifier of the parameter.
+ isPost (bool): Undocumented.
+
+ Content:
+ value (str): the value of the parameter.
+ """
+
+ value: Optional[str] = None
+
+ by_reference: Optional[bool] = attr(name="byReference", default=False)
+ id: str = attr()
+ is_post: Optional[bool] = attr(name="isPost", default=False)
+
+ @field_validator("value", mode="before")
+ def validate_value(cls, value): # pylint: disable=no-self-argument
+ """Coerce non-null parameter values to strings before validation."""
+ # TODO: Find better way to handle arbitrary types
+ if value is None:
+ return None
+ return str(value)
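+
+# Illustrative usage (comment only, not part of the module; names are hypothetical):
+# a by-reference parameter carries a URL for the client to dereference rather than a
+# literal value, e.g.
+#   Parameter(id="upload1", value="https://example.org/table.xml", by_reference=True)
+# serializes to a uws:parameter element with byReference="true" and the URL as its text.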
+
+
+class Parameters(BaseXmlModel, tag="parameters", ns="uws", nsmap=NSMAP):
+ """A list of UWS Job parameters.
+
+ Elements:
+ parameter (Parameter): a UWS Job parameter.
+ """
+
+ parameter: Optional[list[Parameter]] = element(name="parameter", default_factory=list)
+
+
+class ErrorSummary(BaseXmlModel, tag="errorSummary", ns="uws", nsmap=NSMAP):
+ """A short summary of an error - a fuller representation of the
+ error may be retrieved from /{jobs}/{job-id}/error
+
+ Elements:
+ message (str): a short description of the error.
+
+ Attributes:
+ type (ErrorType): Characterization of the type of the error
+ has_detail (bool): If true then there is a more detailed error message available at /{jobs}/{job-id}/error
+ """
+
+ message: str = element(default="")
+
+ type: ErrorType = attr(default=ErrorType.TRANSIENT)
+ has_detail: bool = attr(name="hasDetail", default=False)
+
+
+class ResultReference(BaseXmlModel, tag="result", ns="uws", skip_empty=True, nsmap=NSMAP):
+ """A reference to a UWS result.
+
+ Attributes:
+ id (str): The identifier of the result.
+ type (XlinkType): The xlink type of the result.
+ href (str): The link to the result.
+ size (int): The size of the result in bytes.
+ mime_type (str): The MIME type of the result.
+ """
+
+ id: str = attr()
+
+ # attributeGroup uws:reference
+ type: Optional[XlinkType] = attr(ns="xlink", default=XlinkType.SIMPLE)
+ href: Optional[str] = attr(ns="xlink", default=None)
+
+ size: Optional[int] = attr(default=None)
+ mime_type: Optional[str] = attr(name="mime-type", default=None)
+
+ any_attrs: Optional[Dict[str, str]] = None
+
+
+class Results(BaseXmlModel, tag="results", ns="uws", nsmap=NSMAP):
+ """The element returned for /{jobs}/{job-id}/results
+
+ Elements:
+ results (list[ResultReference]): a list of references to UWS results.
+ """
+
+ results: Optional[list[ResultReference]] = element(name="result", default_factory=list)
+
+
+class ShortJobDescription(BaseXmlModel, tag="jobref", ns="uws", nsmap=NSMAP):
+ """A short description of a job."""
+
+ # pylint: disable = no-self-argument
+
+ phase: ExecutionPhase = element()
+ run_id: Optional[str] = element(tag="runId", default=None)
+ owner_id: Optional[str] = element(tag="ownerId", default=None, nillable=True)
+ creation_time: Optional[UTCTimestamp] = element(tag="creationTime", default=None)
+
+ job_id: str = attr(name="id")
+ type: Optional[XlinkType] = attr(ns="xlink", default=XlinkType.SIMPLE)
+ href: Optional[str] = attr(ns="xlink", default=None)
+
+
+class Jobs(BaseXmlModel, tag="jobs", ns="uws", nsmap=NSMAP):
+ """The list of job references returned at /(jobs)
+
+ The list presented may be affected by the current security context and may be filtered
+
+ Elements:
+ jobref (list[ShortJobDescription]): a list of references to UWS jobs.
+
+ Attributes:
+ version (UWSVersion): The version of the UWS standard that the server complies with.
+ Note that this attribute is actually required by the 1.1 specification - however remains
+ optional in the schema for backwards compatibility.
+ It will be formally required in the next major revision.
+ """
+
+ jobref: Optional[list[ShortJobDescription]] = element(name="jobref", default_factory=list)
+
+ version: Optional[UWSVersion] = attr(default=UWSVersion.V1_1)
+
+
+# pylint: disable=invalid-name
+ParametersType = TypeVar("ParametersType", bound=Parameters)
+
+
+class JobSummary(BaseXmlModel, Generic[ParametersType], tag="job", ns="uws", nsmap=NSMAP):
+ """The complete representation of the state of a job
+
+ Elements:
+ job_id (JobIdentifier, str): The identifier for the job.
+ run_id (str): This is a client supplied identifier - the UWS system does nothing other than to
+ return it as part of the description of the job
+ owner_id (str): The owner (creator) of the job - this should be expressed as a string that can be
+ parsed in accordance with IVOA security standards. If there was no authenticated
+ job creator then this should be set to NULL.
+ phase (ExecutionPhase): The execution phase - returned at /{jobs}/{job-id}/phase
+ quote (datetime): A Quote predicts when the job is likely to complete - returned at
+ /{jobs}/{job-id}/quote
+ "don't know" is encoded by setting to the XML null value xsi:nil="true"
+ creation_time (datetime): The instant at which the job was created.
+ Note that the version 1.1 of the specification requires that this element
+ be present.
+ It is optional only in versions 1.x of the schema for backwards compatibility.
+ 2.0+ versions of the schema will make this formally mandatory in an XML sense.
+ start_time (datetime): The instant at which the job started execution.
+ end_time (datetime): The instant at which the job finished execution.
+ execution_duration (int): The duration (in seconds) for which the job should be allowed to run - a value of 0
+ is intended to mean unlimited - returned at /{jobs}/{job-id}/executionduration
+ destruction (datetime): The time at which the whole job + records + results will be destroyed.
+ Returned at /{jobs}/{job-id}/destruction
+ parameters (Parameters): The parameters to the job (where appropriate) can also be retrieved at
+ /{jobs}/{job-id}/parameters
+ results (Results): The results for the job - can also be retrieved at /{jobs}/{job-id}/results
+ error_summary (ErrorSummary): A short summary of an error
+ job_info (list[str]): Arbitrary information that can be added to the job description by the UWS
+ implementation.
+
+ Attributes:
+ version: (UWSVersion) Note that this attribute is actually required by the 1.1 specification - however remains
+ optional in the schema for backwards compatibility.
+ It will be formally required in the next major revision.
+ """
+
+ # pylint: disable = no-self-argument
+ # pylint: disable = too-few-public-methods
+
+ job_id: str = element(tag="jobId")
+ run_id: Optional[str] = element(tag="runId", default=None)
+ owner_id: Optional[str] = element(tag="ownerId", default=None, nillable=True)
+ phase: ExecutionPhase = element(tag="phase")
+ quote: Optional[UTCTimestamp] = element(tag="quote", default=None, nillable=True)
+ creation_time: Optional[UTCTimestamp] = element(tag="creationTime", default=None)
+ start_time: Optional[UTCTimestamp] = element(tag="startTime", default=None, nillable=True)
+ end_time: Optional[UTCTimestamp] = element(tag="endTime", default=None, nillable=True)
+ execution_duration: Optional[int] = element(tag="executionDuration", default=0)
+ destruction: Optional[UTCTimestamp] = element(tag="destruction", default=None, nillable=True)
+ parameters: Optional[ParametersType] = element(tag="parameters", default=None)
+ results: Optional[Results] = element(tag="results", default=None)
+ error_summary: Optional[ErrorSummary] = element(tag="errorSummary", default=None)
+ job_info: Optional[list[str]] = element(tag="jobInfo", default_factory=list)
+
+ version: Optional[UWSVersion] = attr(default=UWSVersion.V1_1)
+
+ class Config:
+ """JobSummary pydantic config options"""
+
+ arbitrary_types_allowed = True
+
+
+class Job(JobSummary, tag="job"):
+ """This is the information that is returned when a GET is made for a single job resource - i.e. /{jobs}/{job-id}"""
diff --git a/vo_models/xml/uws/types.py b/vo_models/xml/uws/types.py
new file mode 100644
index 0000000..bef7b5b
--- /dev/null
+++ b/vo_models/xml/uws/types.py
@@ -0,0 +1,47 @@
+"""UWS Simple Types"""
+
+from enum import Enum
+
+
+class ErrorType(str, Enum):
+ """Enum for error types."""
+
+ TRANSIENT = "transient"
+ FATAL = "fatal"
+
+class UWSVersion(str, Enum):
+ """The version of the UWS standard that the server complies with."""
+
+ V1_1 = "1.1"
+ V1_0 = "1.0"
+
+class ExecutionPhase(str, Enum):
+ """Enumeration of possible phases of job execution
+
+ PENDING: The first phase a job is entered into - this is where a job is being set up but no request to run
+ has occurred.
+ QUEUED: A job has been accepted for execution but is waiting in a queue.
+ EXECUTING: A job is running
+ COMPLETED: A job has completed successfully.
+ ERROR: Some form of error has occurred.
+ UNKNOWN: The job is in an unknown state.
+ HELD: The job is HELD pending execution and will not automatically be executed - can occur after a
+ PHASE=RUN request has been made (cf PENDING).
+ SUSPENDED: The job has been suspended by the system during execution.
+ ABORTED: The job has been aborted, either by user request or by the server because of lack or overuse of
+ resources.
+ ARCHIVED: The job has been archived by the server at destruction time. An archived job
+ may have deleted the results to reclaim resources, but must have job metadata preserved.
+ This is an alternative that the server may choose in contrast to completely destroying
+ all record of the job.
+ """
+ PENDING = "PENDING"
+ QUEUED = "QUEUED"
+ EXECUTING = "EXECUTING"
+ COMPLETED = "COMPLETED"
+ ERROR = "ERROR"
+ UNKNOWN = "UNKNOWN"
+ HELD = "HELD"
+ SUSPENDED = "SUSPENDED"
+ ABORTED = "ABORTED"
+ ARCHIVED = "ARCHIVED"
diff --git a/vo_models/xml/voresource/__init__.py b/vo_models/xml/voresource/__init__.py
new file mode 100644
index 0000000..5f7879b
--- /dev/null
+++ b/vo_models/xml/voresource/__init__.py
@@ -0,0 +1,3 @@
+"""
+Module containing VOResource classes.
+"""
diff --git a/vo_models/xml/voresource/types.py b/vo_models/xml/voresource/types.py
new file mode 100644
index 0000000..c462f50
--- /dev/null
+++ b/vo_models/xml/voresource/types.py
@@ -0,0 +1,96 @@
+"""VOResource Simple Types"""
+
+import re
+from datetime import datetime
+
+from pydantic import GetCoreSchemaHandler
+from pydantic_core import CoreSchema, core_schema
+
+
+class UTCTimestamp(datetime):
+ """A subclass of datetime to allow expanded handling of ISO formatted datetimes, and enforce
+ the use of a Z identifier for UTC timezone in outputs
+
+ """
+
+ # This is the strict regex definition for VO datetimes from:
+ # https://www.ivoa.net/documents/VOResource/20180625/REC-VOResource-1.1.html#tth_sEc2.2.4
+ # vodt_regex = r"\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z?"
+
+ # Expanded regex to accept Zulu but also +00:00 offset UTC times
+ exp_vodt_regex = r"(\d{4}-\d\d-\d\d(T|\s)\d\d:\d\d:\d\d(\.\d+)?)(Z|\+\d\d:\d\d)?"
+ # Will match:
+ # 2023-03-15T18:27:18.758 (UTC assumed - T separator)
+ # 2023-03-15 18:27:18.758 (UTC assumed - space separator)
+ # 2023-03-15T18:27:18.758Z (Zulu UTC - T separator)
+ # 2023-03-15 18:27:18.758Z (Zulu UTC - space separator)
+ # 2023-03-15T18:27:18.758+00:00 (UTC w/ offset - T separator)
+ # 2023-03-15 18:27:18.758+00:00 (UTC w/ offset - space separator)
+
+ # TODO: Python 3.11 datetime.fromisoformat() does accept a 'Z' indicated UTC time. Revisit this when upgrading.
+
+ vodt_regex_match = re.compile(exp_vodt_regex)
+
+ def __str__(self) -> str:
+ return self.isoformat(sep="T", timespec="milliseconds")
+
+ def _serialize(self) -> str:
+ return self.isoformat(sep="T", timespec="milliseconds")
+
+ # pylint: disable=unused-argument
+ @classmethod
+ def __get_pydantic_core_schema__(cls, source_type, handler: GetCoreSchemaHandler) -> CoreSchema:
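+ # pydantic v2 hook: validate inputs through cls._validate and serialize instances
+ # through cls._serialize so that outputs are always Z-suffixed ISO-8601 strings.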
+ return core_schema.no_info_after_validator_function(
+ cls._validate,
+ core_schema.datetime_schema(),
+ serialization=core_schema.plain_serializer_function_ser_schema(
+ cls._serialize,
+ info_arg=False,
+ return_schema=core_schema.str_schema(),
+ ),
+ )
+
+ @classmethod
+ def _validate(cls, value: str):
+ """Validator that expands the pydantic datetime model to include Z UTC identifiers
+
+ Args:
+ value (str | datetime): an ISO-formatted datetime string (e.g. from a user's POST or the cache) or an existing datetime
+
+ Returns:
+ UTCTimestamp: VO-compliant datetime subclass
+ """
+
+ if isinstance(value, UTCTimestamp):
+ return value
+
+ if isinstance(value, datetime):
+ return cls._validate(value.isoformat())
+
+ if not isinstance(value, str):
+ raise TypeError("String datetime required")
+
+ value = value.upper()
+
+ valid_vodt = cls.vodt_regex_match.fullmatch(value)
+ if not valid_vodt:
+ # If there was no full match, reject it
+ raise ValueError("Invalid VOResource ISO-8601 date format")
+
+ # Grab only the date/time match and manually add a UTC offset for an aware python datetime object
+ value = valid_vodt.group(1) + "+00:00"
+
+ return super().fromisoformat(value)
+
+ @classmethod
+ def fromisoformat(cls, date_string):
+ return cls._validate(date_string)
+
+ def isoformat(self, sep: str = "T", timespec: str = "milliseconds") -> str:
+ """Overwrites the datetime isoformat output to use a Z UTC indicator
+
+ Returns:
+ str: VO-compliant ISO-8601 datetime string
+ """
+ iso_dt = super().isoformat(sep=sep, timespec=timespec)
+ return iso_dt.replace("+00:00", "Z")
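+
+# Illustrative usage (comment only): every accepted input form normalizes to the same
+# Z-suffixed output, e.g.
+#   UTCTimestamp.fromisoformat("2023-03-15 18:27:18.758+00:00").isoformat()
+#   # -> "2023-03-15T18:27:18.758Z"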
diff --git a/vo_models/xml/xlink/__init__.py b/vo_models/xml/xlink/__init__.py
new file mode 100644
index 0000000..76e7b86
--- /dev/null
+++ b/vo_models/xml/xlink/__init__.py
@@ -0,0 +1,5 @@
+"""Module containing pydantic-xml models for XML Linking Language (XLink)
+See: https://www.w3.org/TR/xlink11/
+
+Note: Only implements the simple xlink 'type' attribute values (XlinkType), used in UWS Job models."""
+from vo_models.xml.xlink.xlink import XlinkType
diff --git a/vo_models/xml/xlink/xlink.py b/vo_models/xml/xlink/xlink.py
new file mode 100644
index 0000000..ca7ded2
--- /dev/null
+++ b/vo_models/xml/xlink/xlink.py
@@ -0,0 +1,13 @@
+"""Simple types for xlink schema"""
+from enum import Enum
+
+
+class XlinkType(str, Enum):
+ """xlink 'type' values"""
+
+ SIMPLE = "simple"
+ EXTENDED = "extended"
+ LOCATOR = "locator"
+ ARC = "arc"
+ RESOURCE = "resource"
+ TITLE = "title"