diff --git a/.github/actions/changelog/index.js b/.github/actions/changelog/index.js
index d72726572..a80432356 100644
--- a/.github/actions/changelog/index.js
+++ b/.github/actions/changelog/index.js
@@ -6,16 +6,16 @@ const readline = require("readline");
 const check_author = (author, authors) => {
   if (
     typeof author === "object" &&
-    "email" in author &&
-    !authors.includes(author.email)
+    "name" in author &&
+    !authors.includes(author.name)
   ) {
-    return "- " + author.name + " <" + author.email + ">\n";
+    return "- " + author.name + "\n";
   } else if (
     typeof author === "string" &&
     !authors.includes(author.split(/[<>]/)[1])
   ) {
     return "- " + author + "\n";
   } else {
     return "";
   }
diff --git a/.github/workflows/boilerplate.yml b/.github/workflows/boilerplate.yml
index 13a2f111a..8b2732fd3 100644
--- a/.github/workflows/boilerplate.yml
+++ b/.github/workflows/boilerplate.yml
@@ -28,7 +28,7 @@ jobs:
           fetch-depth: 0
       - name: Get changed files
         id: changed-files
-        uses: tj-actions/changed-files@v31
+        uses: tj-actions/changed-files@v41
        with:
          files: |
            **/*.py
@@ -85,9 +85,26 @@ jobs:
           # See the License for the specific language governing permissions and
           # limitations under the License.

+        boilerplate2024: |-
+          # Copyright 2024 Agnostiq Inc.
+          #
+          # This file is part of Covalent.
+          #
+          # Licensed under the Apache License 2.0 (the "License"). A copy of the
+          # License may be obtained with this software package or at
+          #
+          # https://www.apache.org/licenses/LICENSE-2.0
+          #
+          # Use of this file is prohibited except in compliance with the License.
+          # Unless required by applicable law or agreed to in writing, software
+          # distributed under the License is distributed on an "AS IS" BASIS,
+          # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+          # See the License for the specific language governing permissions and
+          # limitations under the License.
+
       run: |
         for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
-          if [[ ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2021" && ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2022" && ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2023" ]] ; then
+          if [[ ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2021" && ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2022" && ! $( cat $file | tr -d '\r' ) =~ "$boilerplate2023" && !
$( cat $file | tr -d '\r' ) =~ "$boilerplate2024" ]] ; then printf "Boilerplate is missing from $file.\n" printf "The first 15 lines of $file are\n\n" cat $file | tr -d '\r' | cat -ET | head -n 15 diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 9a3589671..046d3318a 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -32,6 +32,7 @@ jobs: uses: actions/checkout@v4 with: token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} + - uses: dorny/paths-filter@v2 id: filter with: @@ -42,6 +43,7 @@ jobs: - '.github/actions/changelog/action.yml' dist: - '.github/actions/changelog/dist/**' + - name: Latest tag id: get-latest-tag uses: ./.github/actions/describe @@ -49,6 +51,7 @@ jobs: token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} branch: develop stable: false + - name: Update version number id: changelog uses: ./.github/actions/changelog @@ -57,6 +60,7 @@ jobs: version-path: VERSION token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} basehead: ${{ steps.get-latest-tag.outputs.tag }}...${{ github.sha }} + - name: Commit if: ${{ steps.changelog.outputs.message != 'noop' }} uses: EndBug/add-and-commit@v9 diff --git a/.github/workflows/man_0_assign_version.yml b/.github/workflows/man_0_assign_version.yml new file mode 100644 index 000000000..8863caf16 --- /dev/null +++ b/.github/workflows/man_0_assign_version.yml @@ -0,0 +1,52 @@ +# Copyright 2024 Agnostiq Inc. +# +# This file is part of Covalent. +# +# Licensed under the Apache License 2.0 (the "License"). A copy of the +# License may be obtained with this software package or at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Use of this file is prohibited except in compliance with the License. +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: man_0_assign_version + +on: + workflow_dispatch: + inputs: + nightly_tests_failed: + type: boolean + required: true + default: true + description: "WARNING: Make sure the `nightly-tests` or the most recent `tests` workflow has passed successfully in develop before running this workflow. + Uncheck this box if it has." + + workflow_call: + inputs: + nightly_tests_failed: + type: boolean + required: true + default: false + +permissions: + id-token: write + contents: read + +jobs: + license: + name: License Scanner + uses: ./.github/workflows/license.yml + + version_assigner: + name: Assign Version + needs: + - license + if: > + !inputs.nightly_tests_failed + uses: ./.github/workflows/changelog.yml + secrets: inherit # pragma: allowlist secret diff --git a/.github/workflows/man_1_push_to_master.yml b/.github/workflows/man_1_push_to_master.yml new file mode 100644 index 000000000..80db54dc3 --- /dev/null +++ b/.github/workflows/man_1_push_to_master.yml @@ -0,0 +1,87 @@ +# Copyright 2024 Agnostiq Inc. +# +# This file is part of Covalent. +# +# Licensed under the Apache License 2.0 (the "License"). A copy of the +# License may be obtained with this software package or at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Use of this file is prohibited except in compliance with the License. 
+# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: man_1_push_to_master + +on: + workflow_dispatch: + inputs: + assign_version_failed: + type: boolean + required: true + default: true + description: "WARNING: Make sure the `man_0_assign_version` workflow has passed successfully before running this workflow. + Uncheck this box if it has." + + workflow_call: + inputs: + assign_version_failed: + type: boolean + required: true + default: false + +permissions: + id-token: write + contents: read + +jobs: + push_to_master: + name: Push develop to master + runs-on: ubuntu-latest + outputs: + release: ${{ steps.push.outputs.release }} + + steps: + - name: Checkout develop + uses: actions/checkout@v4 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Get versions of develop and master + id: get-versions + run: | + develop_version="$(cat ./VERSION)" + master_version="$(git show origin/master:VERSION)" + echo "::set-output name=develop_version::${develop_version}" + echo "::set-output name=master_version::${master_version}" + + - name: Perform the push to master if develop is ahead + id: push + if: > + !inputs.assign_version_failed + run: | + DEVELOP_VERSION="${{ steps.get-versions.outputs.develop_version }}" + MASTER_VERSION="${{ steps.get-versions.outputs.master_version }}" + release=false + echo "Develop version: ${DEVELOP_VERSION}" + echo "Master version: ${MASTER_VERSION}" + if [[ "${DEVELOP_VERSION}" == "${MASTER_VERSION}" ]]; then + echo "No new version detected. Exiting." + exit 1 + elif dpkg --compare-versions $DEVELOP_VERSION 'gt' $MASTER_VERSION ; then + echo "Pushing to master." + git config user.name "CovalentOpsBot" + git config user.email "covalentopsbot@users.noreply.github.com" + git remote set-url origin https://${{ secrets.COVALENT_OPS_BOT_TOKEN }}@github.com/AgnostiqHQ/covalent.git + git push origin HEAD:master + release=true + else + echo "This means the version on develop is lower than the version on master or something is wrong." + exit 1 + fi + echo "Ready to release: ${release}" + echo "::set-output name=release::$release" diff --git a/.github/workflows/man_2_create_prerelease.yml b/.github/workflows/man_2_create_prerelease.yml new file mode 100644 index 000000000..c6c214870 --- /dev/null +++ b/.github/workflows/man_2_create_prerelease.yml @@ -0,0 +1,73 @@ +# Copyright 2024 Agnostiq Inc. +# +# This file is part of Covalent. +# +# Licensed under the Apache License 2.0 (the "License"). A copy of the +# License may be obtained with this software package or at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Use of this file is prohibited except in compliance with the License. +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: man_2_create_prerelease + +on: + workflow_dispatch: + inputs: + push_to_master_failed: + type: boolean + required: true + default: true + description: "WARNING: Make sure the `man_1_push_to_master` workflow has passed successfully before running this workflow. 
+ Uncheck this box if it has." + + workflow_call: + inputs: + push_to_master_failed: + type: boolean + required: true + default: false + +permissions: + id-token: write + contents: read + +jobs: + create_release: + name: Create a Prerelease + uses: ./.github/workflows/release.yml + if: > + !inputs.push_to_master_failed + with: + prerelease: true + secrets: inherit # pragma: allowlist secret + + notify_slack: + name: Notify on Slack + needs: + - create_release + runs-on: ubuntu-latest + steps: + - name: Checkout master + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch_name }} + + - name: Format Slack message + run: | + VERSION="$(cat ./VERSION)" + SLACK_MSG=":rocket: Version $VERSION is now available." + echo "SLACK_MSG=$SLACK_MSG" >> $GITHUB_ENV + + - name: Notify Slack + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: "covalent-ci" + SLACK_USERNAME: "CovalentOpsBot" + SLACK_MESSAGE: ${{ env.SLACK_MSG }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml new file mode 100644 index 000000000..df31afce7 --- /dev/null +++ b/.github/workflows/nightly-tests.yml @@ -0,0 +1,66 @@ +# Copyright 2024 Agnostiq Inc. +# +# This file is part of Covalent. +# +# Licensed under the Apache License 2.0 (the "License"). A copy of the +# License may be obtained with this software package or at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Use of this file is prohibited except in compliance with the License. +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: nightly-tests + +on: + schedule: + - cron: "0 0 * * *" + + workflow_dispatch: + +permissions: + id-token: write + contents: read + +jobs: + license: + name: License Scanner + uses: ./.github/workflows/license.yml + + tests: + name: Unit and Functional Tests + needs: + - license + uses: ./.github/workflows/tests.yml + secrets: inherit # pragma: allowlist secret + + assign_version: + name: Assign Version + needs: + - tests + uses: ./.github/workflows/man_0_assign_version.yml + secrets: inherit # pragma: allowlist secret + with: + nightly_tests_failed: false + + push_to_master: + name: Push to Master + needs: + - assign_version + uses: ./.github/workflows/man_1_push_to_master.yml + secrets: inherit # pragma: allowlist secret + with: + assign_version_failed: false + + create_prerelease: + name: Create a Prerelease + needs: + - push_to_master + uses: ./.github/workflows/man_2_create_prerelease.yml + secrets: inherit # pragma: allowlist secret + with: + push_to_master_failed: false diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 100644 index 539c4c581..000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. 
-# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: nightly - -on: - schedule: - - cron: "*/10 * * * *" - -permissions: - id-token: write - contents: read - -jobs: - license: - name: License Scanner - uses: AgnostiqHQ/covalent/.github/workflows/license.yml@develop - - tests: - name: Unit and Functional Tests - uses: AgnostiqHQ/covalent/.github/workflows/tests.yml@develop - secrets: inherit # pragma: allowlist secret - - changelog: - name: Assign Version - needs: - - license - - tests - uses: AgnostiqHQ/covalent/.github/workflows/changelog.yml@develop - secrets: inherit # pragma: allowlist secret - - push_to_master: - name: Push to Master - runs-on: ubuntu-latest - needs: changelog - outputs: - release: ${{ steps.push.outputs.release }} - steps: - - name: Get latest release - id: query-tags - uses: octokit/request-action@v2.x - with: - route: GET /repos/AgnostiqHQ/covalent/tags - env: - GITHUB_TOKEN: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} - - - name: Parse latest tag - id: get-latest-tag - run: | - # This assumes that the response from the API is sorted in decreasing order (thus the first element is the latest tag) - latest_tag=${{ fromJSON(steps.query-tags.outputs.data)[0].name }} - echo "::set-output name=tag::${latest_tag}" - - - name: Checkout default branch - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - - name: Push to master - id: push - run: | - CHANGELOG_VERSION="${{ needs.changelog.outputs.version }}" - MASTER_VERSION="$(echo ${{ steps.get-latest-tag.outputs.tag }} | cut -c2- )" - VERSION="$(cat ./VERSION)" - release=false - if [ "$MASTER_VERSION" = "$VERSION" ] ; then - echo "$VERSION has been previously released." - elif dpkg --compare-versions $VERSION 'gt' '0.177.0' ; then - git config user.name "CovalentOpsBot" - git config user.email "covalentopsbot@users.noreply.github.com" - git remote set-url origin https://${{ secrets.COVALENT_OPS_BOT_TOKEN }}@github.com/AgnostiqHQ/covalent.git - git push origin HEAD:master - release=true - else - echo "We cannot release versions less than 0.177.0." - fi - echo "::set-output name=release::$release" - - release: - name: Create Release - needs: push_to_master - if: needs.push_to_master.outputs.release == 'true' - uses: AgnostiqHQ/covalent/.github/workflows/release.yml@develop - with: - prerelease: true - secrets: inherit # pragma: allowlist secret - - notify: - name: Notify Slack - needs: release - runs-on: ubuntu-latest - steps: - - name: Checkout master - uses: actions/checkout@v4 - with: - ref: "master" - - - name: Format Slack message - run: | - VERSION="$(cat ./VERSION)" - SLACK_MSG=":rocket: Version $VERSION is now available." 
- echo "SLACK_MSG=$SLACK_MSG" >> $GITHUB_ENV - - - name: Notify Slack - uses: rtCamp/action-slack-notify@v2 - env: - SLACK_CHANNEL: "covalent-ci" - SLACK_USERNAME: "CovalentOpsBot" - SLACK_MESSAGE: ${{ env.SLACK_MSG }} - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - - executor_base_images: - name: Build Executor base images - runs-on: ubuntu-latest - needs: release - strategy: - matrix: - repo: - [ - "AgnostiqHQ/covalent-aws-plugins", - "AgnostiqHQ/covalent-awslambda-plugin", - "AgnostiqHQ/covalent-braket-plugin", - ] - steps: - - name: Build executor_base_images - uses: peter-evans/repository-dispatch@v2 - with: - token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} - repo: ${{ matrix.repo }} - event-type: "prerelease" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2e5b1da83..a928b35d5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -20,7 +20,7 @@ on: workflow_dispatch: inputs: stable_version: - description: "Stable version number, e.g. 0.32.3" + description: "Stable version number, e.g. 0.32.3. Mandatory if a stable release is being created." type: string test_release: description: "Test the workflow but don't create the release. Uncheck this box to create a release." @@ -30,15 +30,10 @@ on: workflow_call: inputs: prerelease: - description: "true: Create a prerelease. false: Create a stable release" + description: "true: Create a prerelease. false: Create a stable release." required: true type: boolean default: true - prerelease_version: - description: "The prerelease version to release" - required: false - type: string - default: "master" permissions: id-token: write @@ -50,9 +45,8 @@ env: '"AlejandroEsquivel",' '"FyzHsn",' '"wjcunningham7",' + '"kessler-frost",' '"santoshkumarradha"]' - EXECUTOR_BASE_DOCKERFILE_URL: "https://raw.githubusercontent.com/AgnostiqHQ/covalent-aws-plugins/develop/Dockerfile?token=${{ secrets.COVALENT_OPS_BOT_TOKEN }}" - AWS_PLUGINS_VERSION_URL: "https://raw.githubusercontent.com/AgnostiqHQ/covalent-aws-plugins/develop/VERSION?token=${{ secrets.COVALENT_OPS_BOT_TOKEN }}" jobs: github: @@ -61,38 +55,20 @@ jobs: release: ${{ env.RELEASE }} steps: - name: Check out stable release tag - uses: actions/checkout@v2 + uses: actions/checkout@v4 if: github.event.inputs.stable_version with: persist-credentials: false fetch-depth: 0 ref: "v${{ github.event.inputs.stable_version }}" - - name: Format prerelease ref - if: inputs.prerelease - run: | - re='^[0-9]+$' - IFS='.' 
read -ra version <<< "${{ inputs.prerelease_version }}" - if [[ ${version[0]} =~ $re ]] ; then - echo "PRERELEASE=v$version" >> $GITHUB_ENV - else - echo "PRERELEASE=$version" >> $GITHUB_ENV - fi - - name: Check out prerelease tag - uses: actions/checkout@v2 + + - name: Check out master branch for prerelease + uses: actions/checkout@v4 if: inputs.prerelease with: persist-credentials: false fetch-depth: 0 - ref: "${{ env.PRERELEASE }}" - - name: Generate stable release message - if: > - github.event.inputs.stable_version - && contains(env.PAUL_BLART, github.actor) - id: stable-changelog - uses: AgnostiqHQ/covalent/.github/actions/stable-changelog@develop - with: - changelog-path: CHANGELOG.md - version-path: VERSION + ref: "master" - name: Read version run: | @@ -104,6 +80,27 @@ jobs: VERSION="$(cat ./VERSION)" echo "VERSION=$VERSION" >> $GITHUB_ENV echo "RELEASE=v$VERSION" >> $GITHUB_ENV + + - name: Tag commit for prerelease + if: inputs.prerelease + id: push + run: | + git config user.name "CovalentOpsBot" + git config user.email "covalentopsbot@users.noreply.github.com" + git tag -a $RELEASE -m "Release $RELEASE" + git remote set-url origin https://${{ secrets.COVALENT_OPS_BOT_TOKEN }}@github.com/AgnostiqHQ/covalent.git + git push origin $RELEASE + + - name: Generate stable release message + if: > + github.event.inputs.stable_version + && contains(env.PAUL_BLART, github.actor) + id: stable-changelog + uses: AgnostiqHQ/covalent/.github/actions/stable-changelog@develop + with: + changelog-path: CHANGELOG.md + version-path: VERSION + - name: Generate prerelease message if: inputs.prerelease id: message @@ -114,26 +111,20 @@ jobs: echo 'MESSAGE<> $GITHUB_ENV tail +$begin ./CHANGELOG.md | head -$end >> $GITHUB_ENV echo 'EOF' >> $GITHUB_ENV - - name: Tag commit - id: push - continue-on-error: true - run: | - git config user.name "CovalentOpsBot" - git config user.email "covalentopsbot@users.noreply.github.com" - git tag -a $RELEASE -m "Release $RELEASE" - git remote set-url origin https://${{ secrets.COVALENT_OPS_BOT_TOKEN }}@github.com/AgnostiqHQ/covalent.git - git push origin $RELEASE + - name: Create prerelease if: >- inputs.prerelease + && ${{ steps.push.outcome == 'success' && steps.message.outcome == 'success' - && (!github.event.inputs.test_release || github.event.inputs.test_release == 'false') + && (!github.event.inputs.test_release || github.event.inputs.test_release == 'false') }} uses: ncipollo/release-action@v1 with: body: ${{ env.MESSAGE }} token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} tag: ${{ env.RELEASE }} - prerelease: true + prerelease: ${{ inputs.prerelease }} + - name: Create stable release if: >- github.event.inputs.stable_version @@ -145,6 +136,7 @@ jobs: body: ${{ steps.stable-changelog.outputs.message }} token: ${{ secrets.COVALENT_OPS_BOT_TOKEN }} tag: ${{ env.RELEASE }} + - name: Alert Slack if: failure() uses: rtCamp/action-slack-notify@v2 @@ -163,36 +155,43 @@ jobs: version: ${{ steps.validate.outputs.version }} steps: - name: Check out release tag - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false fetch-depth: 0 ref: ${{ needs.github.outputs.release }} + - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.8 + - name: Install Python dependencies run: | python -m pip install --upgrade pip pip install twine + - name: Set up Node uses: actions/setup-node@v2 with: node-version: 16 + - name: Build Webapp run: | cd ./covalent_ui/webapp yarn install yarn build + - name: Build Stable or Pre-Release Distribution 
id: pre-or-stable-build run: python setup.py sdist + - name: Transform semver version to pep440 id: version-transform uses: ./.github/actions/version-transform with: version-path: VERSION + - name: Validate Distribution id: validate run: | @@ -211,6 +210,7 @@ jobs: diff -r covalent-${VERSION}/covalent_ui/webapp/build ../covalent_ui/webapp/build rm -rf covalent-${VERSION}/ echo "::set-output name=version::$VERSION" + - name: Upload Distribution if: > steps.pre-or-stable-build.outcome == 'success' @@ -220,43 +220,7 @@ jobs: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} run: twine upload dist/* - - name: Alert Slack - if: failure() - uses: rtCamp/action-slack-notify@v2 - env: - SLACK_CHANNEL: "covalent-ci" - SLACK_USERNAME: "CovalentOpsBot" - SLACK_MESSAGE: "The release.yml workflow is failing in ${{ github.ref }}!" - SLACK_COLOR: ${{ job.status }} - SLACK_TITLE: ":warning: Attention Required :warning:" - SLACK_WEBHOOK: ${{ secrets.SLACK_ALERT_WEBHOOK }} - conda: - needs: pypi - runs-on: ubuntu-latest - continue-on-error: true - strategy: - fail-fast: false - matrix: - python-version: - - "3.8" - - "3.9" - - "3.10" - steps: - - name: Check for stable release - if: github.event.inputs.stable_version && !inputs.prerelease - run: echo "STABLE=true" >> $GITHUB_ENV - - name: Conda skeleton publish - uses: AgnostiqHQ/conda-skeleton-publish@main - if: ${{ !github.event.inputs.test_release }} - with: - pypi_package: "covalent" - python_version: ${{ matrix.python-version }} - upload_channel: "agnostiq" - access_token: ${{ secrets.ANACONDA_TOKEN }} - package_version: ${{ needs.pypi.outputs.version }} - stable: ${{ env.STABLE }} - wait: true - name: Alert Slack if: failure() uses: rtCamp/action-slack-notify@v2 @@ -267,128 +231,3 @@ jobs: SLACK_COLOR: ${{ job.status }} SLACK_TITLE: ":warning: Attention Required :warning:" SLACK_WEBHOOK: ${{ secrets.SLACK_ALERT_WEBHOOK }} - - docker: - runs-on: ubuntu-latest - steps: - - name: Check out release tag - uses: actions/checkout@v2 - if: inputs.stable_version - with: - persist-credentials: false - fetch-depth: 0 - ref: "v${{ github.event.inputs.stable_version }}" - - - name: Check out master - uses: actions/checkout@v2 - if: inputs.prerelease - with: - persist-credentials: false - fetch-depth: 0 - - - name: Set up QEMU - uses: docker/setup-qemu-action@master - with: - platforms: "linux/amd64,linux/arm64" - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@master - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - role-to-assume: ${{ secrets.ECR_PUBLIC_UPLOAD_ROLE }} - aws-region: us-east-1 - - - name: Generate metadata - run: | - aws --version - docker info - TAG="$(cat ./VERSION)" - echo "TAG: $TAG" - echo "TAG=$TAG" >> $GITHUB_ENV - BUILD_DATE=`date -u +%Y-%m-%d` - echo "BUILD_DATE=$BUILD_DATE" >> $GITHUB_ENV - BUILD_VERSION=${{ github.sha }} - echo "BUILD_VERSION=$BUILD_VERSION" >> $GITHUB_ENV - - - name: Login to Public Registry - run: | - aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws - - - name: Build and push pre-release - if: > - inputs.prerelease - && !inputs.stable_version - && github.event_name == 'schedule' - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: . 
- file: Dockerfile - platforms: linux/amd64 - cache-from: type=gha - cache-to: type=gha,mode=max - build-args: | - BUILD_DATE=${{ env.BUILD_DATE }} - BUILD_VERSION=${{ env.BUILD_VERSION }} - push: true - tags: | - public.ecr.aws/covalent/covalent:latest - public.ecr.aws/covalent/covalent:${{ env.TAG }} - - - name: Build and push stable release - if: > - github.event_name == 'workflow_dispatch' - && inputs.stable_version - && !inputs.prerelease - && !github.events.inputs.test_release - && contains(env.PAUL_BLART, github.actor) - uses: docker/build-push-action@v2 - with: - builder: ${{ steps.buildx.outputs.name }} - context: . - file: Dockerfile - platforms: linux/amd64 - cache-from: type=gha - cache-to: type=gha,mode=max - build-args: | - BUILD_DATE=${{ env.BUILD_DATE }} - BUILD_VERSION=${{ env.BUILD_VERSION }} - push: true - tags: | - public.ecr.aws/covalent/covalent:${{ inputs.stable_version }} - public.ecr.aws/covalent/covalent:stable - - # docker_aws_plugins: - # name: Trigger AWS Plugins Base Executor Image Pre-Release - # needs: pypi - # if: > - # inputs.prerelease - # && !inputs.stable_version - # && github.event_name == 'schedule' - # uses: AgnostiqHQ/covalent-aws-plugins/.github/workflows/docker.yml@develop - # with: - # prerelease: true - - # docker_aws_lambda: - # name: Trigger AWS Lambda Base Executor Image Pre-Release - # needs: pypi - # if: > - # inputs.prerelease - # && !inputs.stable_version - # && github.event_name == 'schedule' - # uses: AgnostiqHQ/covalent-awslambda-plugin/.github/workflows/docker.yml@develop - # with: - # prerelease: true - - # docker_aws_braket: - # name: Trigger AWS Braket Base Executor Image Pre-Release - # needs: pypi - # if: > - # inputs.prerelease - # && !inputs.stable_version - # && github.event_name == 'schedule' - # uses: AgnostiqHQ/covalent-braket-plugin/.github/workflows/docker.yml@develop - # with: - # prerelease: true diff --git a/.github/workflows/requirements.yml b/.github/workflows/requirements.yml index 30198b6be..2eb1b7932 100644 --- a/.github/workflows/requirements.yml +++ b/.github/workflows/requirements.yml @@ -39,7 +39,7 @@ jobs: run: python -m pip install pip-check-reqs - name: Check extra core requirements - run: pip-extra-reqs -r werkzeug -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin + run: pip-extra-reqs -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin - name: Check missing SDK requirements run: > @@ -50,10 +50,14 @@ jobs: --ignore-module=pkg_resources --ignore-module=covalent/_dispatcher_plugins --ignore-module=covalent/_shared_files + --ignore-file=covalent/quantum/** + --ignore-file=covalent/_workflow/q* + --ignore-file=covalent/_shared_files/q* + --ignore-file=covalent/_results_manager/q* + --ignore-file=covalent/_shared_files/pickling.py --ignore-file=covalent/executor/** --ignore-file=covalent/triggers/** --ignore-file=covalent/cloud_resource_manager/** - --ignore-file=covalent/quantum/qserver/** --ignore-file=covalent/_programmatic/** covalent diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 946e343f5..1349a02ca 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -28,9 +28,10 @@ on: workflow_dispatch: inputs: commit_sha: - description: "Commit SHA used for testing" + description: "Commit SHA used for 
testing. If left blank, the default branch will be used." type: string - required: true + default: '' + required: false jobs: build_test_matrix: @@ -66,8 +67,8 @@ jobs: outputs: codecov: ${{ steps.local-codecov.outputs.local_codecov }} steps: - - name: Check out head - if: github.event_name != 'workflow_dispatch' + - name: Check out the default branch + if: github.event_name != 'workflow_dispatch' || github.event.inputs.commit_sha == '' uses: actions/checkout@v4 with: persist-credentials: false @@ -90,6 +91,11 @@ jobs: sdk: - 'covalent/**' - 'tests/covalent_tests/**' + qelectron: + - 'covalent/executor/quantum_plugins/**' + - 'covalent/executor/qbase.py' + - 'covalent/quantum/**' + - 'tests/qelectron_tests/**' dispatcher: - 'covalent_dispatcher/**' - 'tests/covalent_dispatcher_tests/**' @@ -134,6 +140,7 @@ jobs: echo "NEED_PYTHON=$NEED_PYTHON" >> $GITHUB_ENV echo "NEED_FRONTEND=$NEED_FRONTEND" >> $GITHUB_ENV echo "BUILD_AND_RUN_ALL=$BUILD_AND_RUN_ALL" >> $GITHUB_ENV + echo "COVALENT_DISABLE_QELECTRON_TESTS=true" >> $GITHUB_ENV - name: Set up Python if: > @@ -159,6 +166,7 @@ jobs: run: | pip install --no-cache-dir -r ./requirements.txt pip install --no-cache-dir -r ./tests/requirements.txt + pip install --no-cache-dir -r ./requirements-qelectron.txt - name: Set up Node if: env.NEED_FRONTEND || env.BUILD_AND_RUN_ALL @@ -209,9 +217,10 @@ jobs: if: env.BUILD_AND_RUN_ALL id: covalent_start run: | + export COVALENT_ENABLE_TASK_PACKING=1 covalent db migrate if [ "${{ matrix.backend }}" = 'dask' ] ; then - COVALENT_ENABLE_TASK_PACKING=1 covalent start -d + covalent start -d elif [ "${{ matrix.backend }}" = 'local' ] ; then covalent start --no-cluster -d else @@ -252,6 +261,18 @@ jobs: if: steps.sdk-tests.outcome == 'success' run: coverage xml -o sdk_coverage.xml + - name: Run Qelectron tests and measure coverage + id: qelectron-tests + if: > + (steps.modified-files.outputs.qelectron == 'true' + || env.BUILD_AND_RUN_ALL) && env.COVALENT_DISABLE_QELECTRON_TESTS != 'true' + run: PYTHONPATH=$PWD/ pytest -vvs --reruns=5 tests/qelectron_tests/core_tests --cov=covalent_qelectron --cov-config=.coveragerc + + - name: Generate Qelectron coverage report + id: qelectron-coverage + if: steps.qelectron-tests.outcome == 'success' && env.COVALENT_DISABLE_QELECTRON_TESTS != 'true' + run: coverage xml -o qelectron_coverage.xml + - name: Run dispatcher tests and measure coverage id: dispatcher-tests if: > @@ -286,17 +307,13 @@ jobs: npm test -- --coverage --watchAll=false --maxWorkers=50% - name: Dump Covalent logs - if: > - steps.covalent_start.outcome == 'success' - && failure() run: covalent logs - name: Upload SDK report to Codecov id: upload-sdk-report if: > env.RECOMMENDED_PLATFORM - && github.event_name != 'workflow_dispatch' - && (github.event_name == 'schedule' + && (github.event_name == 'workflow_call' || steps.sdk-coverage.outcome == 'success') uses: codecov/codecov-action@v3 with: @@ -309,8 +326,7 @@ jobs: id: upload-dispatcher-report if: > env.RECOMMENDED_PLATFORM - && github.event_name != 'workflow_dispatch' - && (github.event_name == 'schedule' + && (github.event_name == 'workflow_call' || steps.dispatcher-coverage.outcome == 'success') uses: codecov/codecov-action@v3 with: @@ -323,7 +339,6 @@ jobs: id: upload-functional-report if: > env.RECOMMENDED_PLATFORM - && github.event_name != 'workflow_dispatch' && steps.functional-coverage.outcome == 'success' uses: codecov/codecov-action@v3 with: @@ -336,8 +351,7 @@ jobs: id: upload-ui-backend-report if: > env.RECOMMENDED_PLATFORM - && github.event_name != 
'workflow_dispatch' - && (github.event_name == 'schedule' + && (github.event_name == 'workflow_call' || steps.ui-backend-coverage.outcome == 'success') uses: codecov/codecov-action@v3 with: @@ -350,8 +364,7 @@ jobs: id: upload-ui-frontend-report if: > env.RECOMMENDED_PLATFORM - && github.event_name != 'workflow_dispatch' - && (github.event_name == 'schedule' + && (github.event_name == 'workflow_call' || steps.ui-frontend-tests.outcome == 'success') uses: codecov/codecov-action@v3 with: @@ -364,8 +377,7 @@ jobs: id: local-codecov if: > env.RECOMMENDED_PLATFORM - && github.event_name != 'workflow_dispatch' - && github.event_name != 'schedule' + && github.event_name == 'workflow_call' run: | if ${{ steps.upload-sdk-report.outcome == 'skipped' && steps.upload-dispatcher-report.outcome == 'skipped' @@ -378,13 +390,13 @@ jobs: - name: Alert Slack if: > - github.event_name == 'schedule' + github.event_name == 'workflow_call' && failure() uses: rtCamp/action-slack-notify@v2 env: SLACK_CHANNEL: "covalent-ci" SLACK_USERNAME: "CovalentOpsBot" - SLACK_MESSAGE: "The tests.yml workflow is failing in develop!" + SLACK_MESSAGE: "The tests.yml workflow is failing in the last '${{github.event_name}}' event run!" SLACK_COLOR: ${{ job.status }} SLACK_TITLE: ":warning: Attention Required :warning:" SLACK_WEBHOOK: ${{ secrets.SLACK_ALERT_WEBHOOK }} diff --git a/.gitignore b/.gitignore index 4e5d125f3..9e53722e0 100644 --- a/.gitignore +++ b/.gitignore @@ -59,6 +59,7 @@ !pyproject.toml !requirements.txt !requirements-client.txt +!requirements-qelectron.txt !setup.py # Allow markdown etc diff --git a/CHANGELOG.md b/CHANGELOG.md index 46ed41604..2f670f14b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,32 +7,144 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [UNRELEASED] +## [0.235.1-rc.0] - 2024-06-10 + +### Authors + +- Santosh kumar <29346072+santoshkumarradha@users.noreply.github.com> +- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> +- Co-authored-by: kessler-frost + + +### Fixed + +- Ignoring all errors when importing qelectrons instead of only `ImportError` + +## [0.235.0-rc.0] - 2024-05-29 + +### Authors + +- Ara Ghukasyan <38226926+araghukas@users.noreply.github.com> +- Casey Jao + + +### Changed + +- Updated Slurm plugin docs to note possible SSH limitation +- Updated Slurm plugin docs to remove `sshproxy` section +- API base endpoint is now configurable from an environment variable +- Removed unused lattice attributes to reduce asset uploads + +### Fixed + +- Improved handling of Covalent version mismatches between client and + executor environments + +### Removed + +- Removed obsolete `migrate-pickled-result-object` command + +### Operations + +- Allow installing a specific commit sha to ease testing + +## [0.234.1-rc.0] - 2024-05-10 + +### Authors + +- Andrew S. Rosen +- Sankalp Sanand +- Co-authored-by: Alejandro Esquivel +- Casey Jao +- Co-authored-by: Santosh kumar <29346072+santoshkumarradha@users.noreply.github.com> + + +### Fixed + +- Sublattice electron function strings are now parsed correctly +- The keys of dictionary inputs to electrons no longer need be strings. +- Fixed inaccuracies in task packing exposed by no longer uploading null attributes upon dispatch. + +### Operations + +- Fixed nightly workflow's calling of other workflows. +- Fixed input values for other workflows in `nightly-tests` workflow. 
+- Removing author email from changelog action
+
+## [0.234.0-rc.0] - 2024-02-07
+
+### Authors
+
+- Andrew S. Rosen (@Andrew_S_Rosen)
+- Casey Jao
+- Sankalp Sanand
+- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
+- ArunPsiog <106462226+ArunPsiog@users.noreply.github.com>
+- Co-authored-by: Ara Ghukasyan
+- FilipBolt
+- sriranjanivenkatesan <116076079+sriranjanivenkatesan@users.noreply.github.com>
+- Co-authored-by: batchumanish
+- Co-authored-by: Prasy12
+- Co-authored-by: batchumanish <β€œmanish.batchu@psiog.com”>
+- Co-authored-by: batchumanish <126003896+batchumanish@users.noreply.github.com>
+- Co-authored-by: Santosh kumar <29346072+santoshkumarradha@users.noreply.github.com>
+- Ara Ghukasyan <38226926+araghukas@users.noreply.github.com>
+
+### Operations
+
+- Added qelectron tests to the `tests` workflow
+- Split the `nightly` workflow into 4 manually triggerable workflows, `nightly-tests`, `man_0_assign_version`, `man_1_push_to_master`, and `man_2_create_prerelease`, to be run in that order.
+- Now only the `nightly-tests` workflow will be run on a daily basis, and the other 3 workflows will be run manually.
+- Removed `conda` releases from `release.yml`.
+- When pushing to `master`, now the version numbers of `develop` and `master` will be compared in `man_1_push_to_master`.
+- Upgraded checkout action to v4 in `release.yml`.
+- Fixed the if condition for the manual workflows.
+- Added pre-release creation as part of the `nightly-tests` workflow.
+
+### Added
+
+- Added CRM method to handle Python to TF value conversion (e.g. None->null, True->true, False->false).
+- Added `pennylane` as a requirement in tests due to the tutorials using it
+
 ### Changed

 - Updated RTD notebooks to fix their behavior
 - Changed the error being shown when drawing the transport graph of a lattice to a debug message instead
+- Revamped README
+- Reorganized `qelectron` tests
+- Made qelectron an opt-in feature using the `covalent[quantum]` extra

 ### Removed

 - Removed unused file transfer how to guides
 - Removed `pennylane` as a requirement from notebooks' requirements.txt as it comes with `covalent`
+- Removed the `validate_args` and `validate_region` methods from the `deploy_group` CLI as they were specific to AWS

 ### Docs

 - Added voice cloning tutorial

+### Fixed
+
+- Fixed the scenario where any deploy commands would fail if the user had a non-deploy-compatible plugin installed
+- Fixed the SQLAlchemy warning that used to show up at every fresh server start
+- Fixed deploy commands' default value of plugins not being propagated to the tfvars file
+
 ## [0.233.0-rc.0] - 2024-01-07

 ### Authors

-- Andrew S. Rosen
+- Andrew S. Rosen (@Andrew_S_Rosen)
 - Casey Jao
 - Sankalp Sanand
 - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
 - ArunPsiog <106462226+ArunPsiog@users.noreply.github.com>
 - Co-authored-by: Ara Ghukasyan
-
 ### Added

 - Added feature to use custom python files as modules to be used in the electron function
@@ -50,7 +162,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Pack deps, call_before, and call_after assets into one file.
 - Changed handling of tuples and sets when building the transport graph - they will be converted to electron lists as well for now
 - `qelectron_db`, `qelectron_data_exists`, `python_version`, and `covalent_version`
-  are now optional in the pydantic model definitions.
+ are now optional in the pydantic model definitions. ### Fixed @@ -70,7 +182,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Ara Ghukasyan <38226926+araghukas@users.noreply.github.com> - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> - ### Operations - Ignore custom executor plugin in how-to's when running `test_deploy_status` CLI test. @@ -104,7 +215,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Authors -- Andrew S. Rosen +- Andrew S. Rosen (@Andrew_S_Rosen) - Co-authored-by: Will Cunningham - Co-authored-by: Sankalp Sanand - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> @@ -131,8 +242,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Co-authored-by: Ara Ghukasyan - Co-authored-by: Alejandro Esquivel - - ### Added - Programmatic equivalents of CLI commands `covalent start` and `covalent stop` @@ -150,7 +259,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Contributing guidelines steps for installing for the first time - Updated gitignore to ignore yarn files and folders for latest version of yarn - Fixed the bug that caused ValueError error when using KEYWORD_ONLY parameter in electron func -- Changed code at line 218 in covalent/_shared_files/utils.py +- Changed code at line 218 in covalent/\_shared_files/utils.py - Fixed usage of deprecated pydantic validation methods - Fixed qelectron_db retrieval in result object - Fixed editability of Qelectron on settings page - UI changes @@ -211,7 +320,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Authors -- Andrew S. Rosen +- Andrew S. Rosen (@Andrew_S_Rosen) - Alejandro Esquivel - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> - Co-authored-by: mpvgithub <107603631+mpvgithub@users.noreply.github.com> @@ -270,7 +379,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Authors -- Andrew S. Rosen +- Andrew S. Rosen (@Andrew_S_Rosen) - Co-authored-by: Sankalp Sanand - Will Cunningham - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> @@ -360,7 +469,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Authors -- Andrew S. Rosen +- Andrew S. Rosen (@Andrew_S_Rosen) - Co-authored-by: Sankalp Sanand - Will Cunningham - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> @@ -402,7 +511,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Prasanna Venkatesh <54540812+Prasy12@users.noreply.github.com> - Co-authored-by: kamalesh.suresh - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> -- Co-authored-by: Andrew S. Rosen +- Co-authored-by: Andrew S. Rosen (@Andrew_S_Rosen) - Faiyaz Hasan - Co-authored-by: sriranjani venkatesan - Will Cunningham @@ -423,7 +532,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Prasanna Venkatesh <54540812+Prasy12@users.noreply.github.com> - Co-authored-by: kamalesh.suresh - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> -- Co-authored-by: Andrew S. Rosen +- Co-authored-by: Andrew S. 
Rosen (@Andrew_S_Rosen) - Faiyaz Hasan - Co-authored-by: sriranjani venkatesan - Will Cunningham @@ -485,7 +594,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Sankalp Sanand - Co-authored-by: kessler-frost - Faiyaz Hasan -- Andrew S. Rosen +- Andrew S. Rosen (@Andrew_S_Rosen) - Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> - Co-authored-by: Santosh kumar <29346072+santoshkumarradha@users.noreply.github.com> diff --git a/MANIFEST.in b/MANIFEST.in index 8d2699c53..71deeab82 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,7 @@ include VERSION include requirements.txt include requirements-client.txt +include requirements-qelectron.txt include covalent/py.typed recursive-include covalent/executor/ * recursive-include covalent_dispatcher/_service/ * diff --git a/README.md b/README.md index 85cf21d1b..780e10bd5 100644 --- a/README.md +++ b/README.md @@ -1,141 +1,202 @@ -  - +
+ [hero banner image]
+
- +[![version](https://img.shields.io/github/v/tag/AgnostiqHQ/covalent?color=%235552FF&include_prereleases&label=version&sort=semver)](https://github.com/AgnostiqHQ/covalent/blob/develop/CHANGELOG.md) +[![Static Badge](https://img.shields.io/badge/python-3.8_%7C_3.9_%7C_3.10-%235552FF)](#) +[![Static Badge](https://img.shields.io/badge/tests-passing-%235552FF?logo=github)](https://github.com/AgnostiqHQ/covalent/actions/workflows/tests.yml) +[![Static Badge](https://img.shields.io/badge/docs-passing-%235552FF)](https://docs.covalent.xyz/docs/) +[![Static Badge](https://img.shields.io/badge/codecov-88%25-%235552FF?logo=codecov)](https://codecov.io/gh/AgnostiqHQ/covalent) +[![apache](https://img.shields.io/badge/License-Apache_License_2.0-blue?color=%235552FF)](https://www.apache.org/licenses/LICENSE-2.0)
+ +
Run AI, ML, and Scientific Research Code on Any Cloud or On-Prem Cluster with a Single Line
+
+
+ [divider images]
+
-[![version](https://img.shields.io/github/v/tag/AgnostiqHQ/covalent?color=navy&include_prereleases&label=version&sort=semver)](https://github.com/AgnostiqHQ/covalent/blob/develop/CHANGELOG.md) -[![python](https://img.shields.io/pypi/pyversions/cova)](https://github.com/AgnostiqHQ/covalent) -[![tests](https://github.com/AgnostiqHQ/covalent/actions/workflows/tests.yml/badge.svg)](https://github.com/AgnostiqHQ/covalent/actions/workflows/tests.yml) -[![docs](https://readthedocs.org/projects/covalent/badge/?version=latest)](https://covalent.readthedocs.io/en/latest/?badge=latest) -[![codecov](https://codecov.io/gh/AgnostiqHQ/covalent/branch/master/graph/badge.svg?token=YGHCB3DE4P)](https://codecov.io/gh/AgnostiqHQ/covalent) -[![apache](https://img.shields.io/badge/License-Apache_License_2.0-blue)](https://www.apache.org/licenses/LICENSE-2.0) - +```bash +pip install covalent --upgrade +``` +Check our [Quick Start Guide](https://docs.covalent.xyz/docs/get-started/quick-start/) for setup instructions or dive into your [First Experiment](https://docs.covalent.xyz/docs/get-started/first-experiment/). Learn more on the [Concepts](https://docs.covalent.xyz/docs/user-documentation/concepts/concepts-index/). +### What is Covalent? +Covalent is a Python library for AI/ML engineers, developers, and researchers. It provides a straightforward approach to running compute jobs, like LLMs, generative AI, and scientific research, on various cloud platforms or on-prem clusters. -## πŸ€” What is Covalent? -Covalent is a Pythonic workflow tool for computational scientists, AI/ML software engineers, and anyone who needs to run experiments on limited or expensive computing resources including quantum computers, HPC clusters, GPU arrays, and cloud services. +
+Run Code Anywhere: Execute Python functions in any cloud or on-prem cluster by changing just a single line of code. +
-Covalent enables a researcher to run computation tasks on an advanced hardware platform – such as a quantum computer or serverless HPC cluster – using a single line of code. -

-Covalent Executors -

+It is as simple as swapping the decorator with our executor plugins. Choose from [existing plugins](https://docs.covalent.xyz/docs/plugin) or [create custom ones](https://github.com/AgnostiqHQ/covalent-executor-template) for tailored interactions with any infrastructure. +
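To make the swap concrete, here is a minimal sketch using Covalent's public API; the `awsbatch` executor label assumes the corresponding AWS Batch plugin is installed, and `"local"` or any other installed executor can be passed the same way:

```python
import covalent as ct

# Changing only the executor argument retargets the task;
# the function body stays untouched.
@ct.electron(executor="awsbatch")  # assumes covalent-awsbatch-plugin is installed
def train(x):
    return x * 2

@ct.lattice
def workflow(x):
    return train(x)

# Dispatch to the running Covalent server and block until the result is ready.
dispatch_id = ct.dispatch(workflow)(21)
result = ct.get_result(dispatch_id, wait=True)
```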
+
-## πŸ’­ Why Covalent? +
+Abstraction of Infrastructure Management: Abstracts the complexities of cloud consoles, Terraform, or IaC in the background.
+
+
-Covalent overcomes computational and operational challenges inherent in AI/ML experimentation. +
+Serverless Infrastructure: Automatically converts any infrastructure, including on-prem SLURM clusters or cloud compute, into a serverless setup. +
+
+
-| **Computational challenges** | **Operational challenges** |
-| :--- | :--- |
-| β€’ Advanced compute hardware is expensive, and access is often limited – shared with other researchers, for example. β€’ You'd like to iterate quickly, but running large models takes time. β€’ Parallel computation speeds execution, but requires careful attention to data relationships. | β€’ Proliferation of models, datasets, and hardware trials. β€’ Switching between development tools, including notebooks, scripts, and submission queues. β€’ Tracking, repeating, and sharing results. |
-
-[Covalent value image]
-
+If you find Covalent useful or interesting, feel free to give us a ⭐ on GitHub! Your support helps us to continue developing and improving this framework. -With Covalent, you: -- Assign functions to appropriate resources: Use advanced hardware (quantum computers, HPC clusters) for the heavy lifting and commodity hardware for bookkeeping. -- Test functions on local servers before shipping them to advanced hardware. -- Let Covalent's services analyze functions for data independence and automatically parallelize them. -- Run experiments from a Jupyter notebook (or whatever your preferred interactive Python environment is). -- Track workflows and examine results in a browser-based GUI. +
+<table>
+<tr>
+<th>For AI/ML Practitioners and Developers</th>
+<th>For Researchers</th>
+</tr>
+<tr>
+<td>
+β€’ Robust Compute Backend: Ideal as a backend compute framework for AI/ML applications, Large Language Models (LLMs), Generative AI, and more.<br>
+β€’ Cloud-Agnostic Execution: Execute high-compute tasks seamlessly across different cloud environments.<br>
+β€’ Infrastructure Abstraction: Directly use computing resources while keeping your business code independent from the infrastructure/resource definitions.
+</td>
+<td>
+β€’ Local-Like Access: Effortlessly connect to compute resources from your laptop, eliminating the need for SSH or complex scripts.<br>
+β€’ Unified Interface Across Environments: Consistent experience with on-prem HPC clusters and cloud platforms like SLURM, PBS, LSF, AWS, GCP, Azure.<br>
+β€’ Real-Time Monitoring: User-friendly UI for real-time monitoring, enabling cost-effective and iterative R&D.
+</td>
+</tr>
+</table>
+
+### Out-of-box observability - [Try out the demo](http://demo.covalent.xyz/) -## πŸ“¦ Installation +If you find Covalent useful or interesting, feel free to give us a ⭐ on GitHub! Your support helps us to continue developing and improving this framework. -Covalent is developed using Python version 3.8 on Linux and macOS. The easiest way to install Covalent is using the PyPI package manager: + +
+ [demo video]
-```console -pip install covalent -``` -Refer to the [Quick Start](https://docs.covalent.xyz/docs/get-started/quick-start) guide for quick setup instructions, or to the [First Experiment](https://docs.covalent.xyz/docs/get-started/first-experiment) guide for a more thorough approach. For a full list of supported platforms, see the Covalent [compatibility matrix](https://docs.covalent.xyz/docs/user-documentation/compatibility). -## πŸš€ Getting started +### Explore Covalent Through Examples -Ready to try it? Go to the [First Experiment](https://docs.covalent.xyz/docs/get-started/first-experiment) guide in the documentation. +Jump right into practical examples to see Covalent in action. These tutorials cover a range of applications, giving you a hands-on experience: -For a more in-depth description of Covalent's features and how they work, see the [Concepts](https://docs.covalent.xyz/docs/user-documentation/concepts/concepts-index) page in the documentation. + - - - - - - - - - -
-| πŸ“š Know more ! | βœοΈ Tutorials and Examples |
-| :--- | :--- |
-| β€’ [What is Covalent?](https://www.covalent.xyz/what-is-covalent/) β€’ [Covalent in the era of cloud-HPC](https://www.covalent.xyz/navigating-the-modern-hpc-landscape/) β€’ [Basic Concepts of Covalent](https://docs.covalent.xyz/docs/user-documentation/concepts/covalent-basics) β€’ [How does Covalent work?](#how-does-it-work) | β€’ [Covalent with pytorch for classical machine learning](https://docs.covalent.xyz/docs/user-documentation/tutorials/mnist) β€’ [Covalent with pennylane for quantum machine learning](https://docs.covalent.xyz/docs/user-documentation/tutorials/quantumembedding) β€’ [Covalent with Qiskit for quantum-SVM](https://docs.covalent.xyz/docs/user-documentation/tutorials/svm/) β€’ [Covalent with Dask for scaling Ensemble classification](https://docs.covalent.xyz/docs/user-documentation/tutorials/ensemble/) β€’ [Covalent for Deep Neural Network on AWS](https://docs.covalent.xyz/docs/user-documentation/tutorials/dnn_comparison/) |
+### Explore Our Extensive Plugin Ecosystem +Covalent integrates seamlessly with a variety of platforms. Discover our range of plugins to enhance your Covalent experience: -## How Does It Work? -Covalent Architecture -Covalent has three main components: +
+
+ [plugin images]
-- A Python module containing an API that you use to build manageable workflows out of new or existing Python functions. -- A set of services that run locally or on a server to dispatch and execute workflow tasks. -- A browser-based UI from which to manage workflows and view results. +### Key Features at a Glance -You compose workflows using the Covalent API by simply adding a single line of python decorator and submit them to the Covalent server. The server analyzes the workflow to determine dependencies between tasks, then dispatches each task to its specified execution backend. Independent tasks are executed concurrently if resources are available. +Get a quick overview of what Covalent offers. Our infographic summarizes the main features, providing you with a snapshot of our capabilities: -The Covalent UI displays the progress of each workflow at the level of individual tasks. +
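As a sketch of the dependency analysis just described (standard Covalent API, assuming a running server): `square` and `cube` below share no data, so the dispatcher may execute them concurrently, while `add` waits on both.

```python
import covalent as ct

@ct.electron
def square(x):
    return x**2

@ct.electron
def cube(x):
    return x**3

@ct.electron
def add(a, b):
    return a + b

@ct.lattice
def workflow(x):
    # square(x) and cube(x) are independent; add() depends on both results.
    return add(square(x), cube(x))

dispatch_id = ct.dispatch(workflow)(3)
```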
+
+ development +
-
-

The Covalent API

-The Covalent API is a Python module containing a small collection of classes that implement server-based workflow management. The key elements are two decorators that wrap functions to create managed *tasks* and *workflows*. +
-The task decorator is called an *electron*. The electron decorator simply turns the function into a dispatchable task. +--- -The workflow decorator is called a *lattice*. The lattice decorator turns a function composed of electrons into a manageable workflow. -
-
-

Covalent Services

-The Covalent server is a lightweight service that runs on your local machine or a server. A dispatcher analyzes workflows (lattices) and hands its component functions (electrons) off to executors. Each executor is an adaptor to a backend hardware resource. Covalent has a growing list of turn-key executors for common compute backends. If no executor exists yet for your compute platform, Covalent supports writing your own. -
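To give a flavor of "writing your own", here is a toy executor sketch; the `run` signature follows the executor plugin template linked above, so treat the details as an assumption rather than a drop-in plugin:

```python
from covalent.executor import BaseExecutor

class EchoExecutor(BaseExecutor):
    """Toy executor: runs the task in-process and logs what it does."""

    def run(self, function, args, kwargs, task_metadata):
        # A real plugin would ship the function off to remote hardware here.
        print(f"Running node {task_metadata.get('node_id')} in-process")
        return function(*args, **kwargs)
```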
-
-

The Covalent GUI

-The Covalent user interface runs as a web server on the machine where the Covalent server is running. The GUI dashboard shows a list of dispatched workflows. From there, you can drill down to workflow details or a graphical view of the workflow. You can also view logs, settings, and result sets. -
+### Know More About Covalent + +For a more in-depth description of Covalent's features and how they work, see the [Concepts](https://docs.covalent.xyz/docs/user-documentation/concepts/concepts-index/) page in the documentation. -## πŸ“š Documentation +
-The official documentation includes tips on getting started, high-level concepts, tutorials, and the API documentation, and more. To learn more, see the [Covalent documentation](https://docs.covalent.xyz/docs). +
+ [divider images]
-## Troubleshooting +
-Solutions to common issues can be found in the [Troubleshooting Guide](https://docs.covalent.xyz/docs/user-documentation/troubleshooting). +### Installation + +Covalent is developed using Python on Linux and macOS. The easiest way to install Covalent is by using the PyPI package manager. + +``` +pip install covalent --upgrade +``` + +For other methods of installation, please [check the docs.](https://docs.covalent.xyz/docs/get-started/install/) + +**Deployments** + +
+Covalent offers flexible deployment options, from Docker image/AMIs for self-hosting to pip package for local installations, accommodating various use cases +
+ +
+ +
+ +divider +divider +divider + +
+ +
+
-## βœ”οΈ Contributing +### Contributing -To contribute to Covalent, refer to the [Contribution Guidelines](https://github.com/AgnostiqHQ/covalent/blob/master/CONTRIBUTING.md). We use GitHub's [issue tracking](https://github.com/AgnostiqHQ/covalent/issues) to manage known issues, bugs, and pull requests. Get started by forking the develop branch and submitting a pull request with your contributions. Improvements to the documentation, including tutorials and how-to guides, are also welcome from the community. For more more information on adding tutorials, check the [Tutorial Guidelines](https://github.com/AgnostiqHQ/covalent/blob/master/doc/TUTORIAL_GUIDELINES.md) Participation in the Covalent community is governed by the [Code of Conduct](https://github.com/AgnostiqHQ/covalent/blob/master/CODE_OF_CONDUCT.md). + -## βš“ Citation +To contribute to Covalent, refer to the [Contribution Guidelines](https://github.com/AgnostiqHQ/covalent/blob/master/CONTRIBUTING.md). We use GitHub's [issue tracking](https://github.com/AgnostiqHQ/covalent/issues) to manage known issues, bugs, and pull requests. Get started by forking the `develop` branch and submitting a pull request with your contributions. Improvements to the documentation, including tutorials and how-to guides, are also welcome from the community. For more information on adding tutorials, check the [Tutorial Guidelines](https://github.com/AgnostiqHQ/covalent/blob/master/doc/TUTORIAL_GUIDELINES.md). Participation in the Covalent community is governed by the [Code of Conduct](https://github.com/AgnostiqHQ/covalent/blob/master/CODE_OF_CONDUCT.md). -Please use the following citation in any publications: +### Citation -> https://doi.org/10.5281/zenodo.5903364 +Please use the following citation in any publications. -## πŸ“ƒ License +[https://doi.org/10.5281/zenodo.5903364](https://zenodo.org/records/8369670) -Covalent is licensed under the Apache 2.0 License. See the [LICENSE](https://github.com/AgnostiqHQ/covalent/blob/master/LICENSE) file or contact the [support team](mailto:support@agnostiq.ai) for more details. +### License +Covalent is licensed under the Apache 2.0 License. See the [LICENSE](https://github.com/AgnostiqHQ/covalent/blob/master/LICENSE) file or contact the [support team](mailto:support@aqnostic.ai) for more details. ->For a detailed history of changes and new features, see the [Changelog](https://github.com/AgnostiqHQ/covalent/blob/master/CHANGELOG.md). +For a detailed history of changes and new features, see the [Changelog](https://github.com/AgnostiqHQ/covalent/blob/master/CHANGELOG.md). diff --git a/VERSION b/VERSION index 3fea2add4..abee87d30 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.233.0-rc.0 \ No newline at end of file +0.235.1-rc.0 \ No newline at end of file diff --git a/covalent/__init__.py b/covalent/__init__.py index 6d88af00d..d644a57d6 100644 --- a/covalent/__init__.py +++ b/covalent/__init__.py @@ -16,6 +16,7 @@ """Main Covalent public functionality.""" +import contextlib from importlib import metadata from . 
import _file_transfer as fs # nopycln: import @@ -48,9 +49,12 @@ lattice, ) from ._workflow.electron import wait # nopycln: import -from ._workflow.qelectron import qelectron # nopycln: import from .executor.utils import get_context # nopycln: import -from .quantum import QCluster # nopycln: import + +with contextlib.suppress(Exception): + # try to load qelectron modules + from ._workflow.qelectron import qelectron # nopycln: import + from .quantum import QCluster # nopycln: import __all__ = [s for s in dir() if not s.startswith("_")] diff --git a/covalent/_api/apiclient.py b/covalent/_api/apiclient.py index c4c2a5492..d3be6bd4a 100644 --- a/covalent/_api/apiclient.py +++ b/covalent/_api/apiclient.py @@ -33,7 +33,7 @@ def __init__(self, dispatcher_addr: str, adapter: HTTPAdapter = None, auto_raise self.adapter = adapter self.auto_raise = auto_raise - def prepare_headers(self, **kwargs): + def prepare_headers(self, kwargs): extra_headers = CovalentAPIClient.get_extra_headers() headers = kwargs.get("headers", {}) if headers: @@ -42,7 +42,7 @@ def prepare_headers(self, **kwargs): return headers def get(self, endpoint: str, **kwargs): - headers = self.prepare_headers(**kwargs) + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -62,7 +62,7 @@ def get(self, endpoint: str, **kwargs): return r def put(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -81,7 +81,7 @@ def put(self, endpoint: str, **kwargs): return r def post(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -100,7 +100,7 @@ def post(self, endpoint: str, **kwargs): return r def delete(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: diff --git a/covalent/_dispatcher_plugins/local.py b/covalent/_dispatcher_plugins/local.py index 8760cec96..9857342cf 100644 --- a/covalent/_dispatcher_plugins/local.py +++ b/covalent/_dispatcher_plugins/local.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
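A quick sketch of the optional-import pattern adopted in `covalent/__init__.py` above: when the quantum extras are not installed, the qelectron symbols are simply absent rather than breaking `import covalent`. The `HAS_QELECTRON` flag below is illustrative and not part of this changeset; note also that the changeset suppresses `Exception` rather than only `ImportError`, which additionally swallows initialization errors raised inside the optional modules.

```python
import contextlib

# If the optional quantum dependencies (e.g. pennylane) are missing, the
# import fails and is silently skipped; the public namespace then simply
# lacks `qelectron` instead of raising at import time.
with contextlib.suppress(ImportError):
    from covalent._workflow.qelectron import qelectron  # noqa: F401

# Downstream code can feature-test instead of importing eagerly:
HAS_QELECTRON = "qelectron" in globals()
```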
+import os import tempfile from copy import deepcopy from functools import wraps @@ -47,6 +48,9 @@ dispatch_cache_dir.mkdir(parents=True, exist_ok=True) +BASE_ENDPOINT = os.getenv("COVALENT_DISPATCH_BASE_ENDPOINT", "/api/v2/dispatches") + + def get_redispatch_request_body_v2( dispatch_id: str, staging_dir: str, @@ -540,10 +544,10 @@ def register_manifest( dispatcher_addr = format_server_url() stripped = strip_local_uris(manifest) if push_assets else manifest - endpoint = "/api/v2/dispatches" + endpoint = BASE_ENDPOINT if parent_dispatch_id: - endpoint = f"{endpoint}/{parent_dispatch_id}/subdispatches" + endpoint = f"{BASE_ENDPOINT}/{parent_dispatch_id}/sublattices" r = APIClient(dispatcher_addr).post(endpoint, data=stripped.model_dump_json()) r.raise_for_status() @@ -596,7 +600,7 @@ def _upload(assets: List[AssetSchema]): number_uploaded = 0 for i, asset in enumerate(assets): if not asset.remote_uri or not asset.uri: - app_log.debug(f"Skipping asset {i+1} out of {total}") + app_log.debug(f"Skipping asset {i + 1} out of {total}") continue if asset.remote_uri.startswith(local_scheme_prefix): copy_file_locally(asset.uri, asset.remote_uri) @@ -604,7 +608,7 @@ def _upload(assets: List[AssetSchema]): else: _upload_asset(asset.uri, asset.remote_uri) number_uploaded += 1 - app_log.debug(f"Uploaded asset {i+1} out of {total}.") + app_log.debug(f"Uploaded asset {i + 1} out of {total}.") app_log.debug(f"uploaded {number_uploaded} assets.") @@ -615,6 +619,7 @@ def _upload_asset(local_uri, remote_uri): else: local_path = local_uri + filesize = os.path.getsize(local_path) with open(local_path, "rb") as reader: app_log.debug(f"uploading to {remote_uri}") f = furl(remote_uri) @@ -624,6 +629,11 @@ def _upload_asset(local_uri, remote_uri): dispatcher_addr = f"{scheme}://{host}:{port}" endpoint = str(f.path) api_client = APIClient(dispatcher_addr) + if f.query: + endpoint = f"{endpoint}?{f.query}" + + # Workaround for Requests bug when streaming from empty files + data = reader.read() if filesize < 50 else reader - r = api_client.put(endpoint, data=reader) + r = api_client.put(endpoint, headers={"Content-Length": str(filesize)}, data=data) r.raise_for_status() diff --git a/covalent/_results_manager/result.py b/covalent/_results_manager/result.py index a42f514a6..8a6e3520b 100644 --- a/covalent/_results_manager/result.py +++ b/covalent/_results_manager/result.py @@ -18,7 +18,7 @@ import os import re from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Set, Union +from typing import TYPE_CHECKING, Any, Dict, List, Union from .._shared_files import logger from .._shared_files.config import get_config @@ -516,34 +516,3 @@ def _convert_to_electron_result(self) -> Any: """ return self._result - - -def _filter_cova_decorators(function_string: str, cova_imports: Set[str]) -> str: - """ - Given a string representing a function, comment out any Covalent-related decorators. - - Args - function_string: A string representation of a workflow function. - - Returns: - The function string with Covalent-related decorators commented out. 
- """ - - has_cova_decorator = False - in_decorator = 0 - function_lines = function_string.split("\n") - for i in range(len(function_lines)): - line = function_lines[i].strip() - if in_decorator > 0: - function_lines[i] = f"# {function_lines[i]}" - in_decorator += line.count("(") - in_decorator -= line.count(")") - elif line.startswith("@"): - decorator_name = line.split("@")[1].split(".")[0].split("(")[0] - if decorator_name in cova_imports: - function_lines[i] = f"# {function_lines[i]}" - has_cova_decorator = True - in_decorator += line.count("(") - in_decorator -= line.count(")") - - return "\n".join(function_lines) if has_cova_decorator else function_string diff --git a/covalent/_serialize/electron.py b/covalent/_serialize/electron.py index fe5763675..b90879fbf 100644 --- a/covalent/_serialize/electron.py +++ b/covalent/_serialize/electron.py @@ -210,8 +210,8 @@ def _get_node_custom_assets(node_attrs: dict) -> Dict[str, AssetSchema]: def serialize_node(node_id: int, node_attrs: dict, node_storage_path) -> ElectronSchema: meta = _serialize_node_metadata(node_attrs, node_storage_path) assets = _serialize_node_assets(node_attrs, node_storage_path) - custom_assets = _get_node_custom_assets(node_attrs) - return ElectronSchema(id=node_id, metadata=meta, assets=assets, custom_assets=custom_assets) + assets._custom = _get_node_custom_assets(node_attrs) + return ElectronSchema(id=node_id, metadata=meta, assets=assets) def deserialize_node(e: ElectronSchema, metadata_only: bool = False) -> dict: diff --git a/covalent/_serialize/lattice.py b/covalent/_serialize/lattice.py index 3ab39f2bc..3d61fcfc1 100644 --- a/covalent/_serialize/lattice.py +++ b/covalent/_serialize/lattice.py @@ -40,10 +40,6 @@ "workflow_function_string": AssetType.TEXT, "doc": AssetType.TEXT, "inputs": AssetType.TRANSPORTABLE, - "named_args": AssetType.TRANSPORTABLE, - "named_kwargs": AssetType.TRANSPORTABLE, - "cova_imports": AssetType.JSONABLE, - "lattice_imports": AssetType.TEXT, "hooks": AssetType.JSONABLE, } @@ -112,33 +108,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets: lat.inputs, ASSET_TYPES["inputs"], storage_path, ASSET_FILENAME_MAP["inputs"] ) - # Deprecate - named_args_asset = save_asset( - lat.named_args, - ASSET_TYPES["named_args"], - storage_path, - ASSET_FILENAME_MAP["named_args"], - ) - named_kwargs_asset = save_asset( - lat.named_kwargs, - ASSET_TYPES["named_kwargs"], - storage_path, - ASSET_FILENAME_MAP["named_kwargs"], - ) - cova_imports_asset = save_asset( - lat.cova_imports, - ASSET_TYPES["cova_imports"], - storage_path, - ASSET_FILENAME_MAP["cova_imports"], - ) - lattice_imports_asset = save_asset( - lat.lattice_imports, - ASSET_TYPES["lattice_imports"], - storage_path, - ASSET_FILENAME_MAP["lattice_imports"], - ) - - # NOTE: these are actually JSONable hooks_asset = save_asset( lat.metadata["hooks"], ASSET_TYPES["hooks"], @@ -151,10 +120,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets: workflow_function_string=workflow_func_str_asset, doc=docstring_asset, inputs=inputs_asset, - named_args=named_args_asset, - named_kwargs=named_kwargs_asset, - cova_imports=cova_imports_asset, - lattice_imports=lattice_imports_asset, hooks=hooks_asset, ) @@ -166,20 +131,12 @@ def _deserialize_lattice_assets(assets: LatticeAssets) -> dict: ) doc = load_asset(assets.doc, ASSET_TYPES["doc"]) inputs = load_asset(assets.inputs, ASSET_TYPES["inputs"]) - named_args = load_asset(assets.named_args, ASSET_TYPES["named_args"]) - named_kwargs = load_asset(assets.named_kwargs, 
ASSET_TYPES["named_kwargs"]) - cova_imports = load_asset(assets.cova_imports, ASSET_TYPES["cova_imports"]) - lattice_imports = load_asset(assets.lattice_imports, ASSET_TYPES["lattice_imports"]) hooks = load_asset(assets.hooks, ASSET_TYPES["hooks"]) return { "workflow_function": workflow_function, "workflow_function_string": workflow_function_string, "__doc__": doc, "inputs": inputs, - "named_args": named_args, - "named_kwargs": named_kwargs, - "cova_imports": cova_imports, - "lattice_imports": lattice_imports, "metadata": { "hooks": hooks, }, @@ -194,12 +151,10 @@ def _get_lattice_custom_assets(lat: Lattice) -> Dict[str, AssetSchema]: def serialize_lattice(lat, storage_path: str) -> LatticeSchema: meta = _serialize_lattice_metadata(lat) assets = _serialize_lattice_assets(lat, storage_path) - custom_assets = _get_lattice_custom_assets(lat) + assets._custom = _get_lattice_custom_assets(lat) tg = serialize_transport_graph(lat.transport_graph, storage_path) - return LatticeSchema( - metadata=meta, assets=assets, custom_assets=custom_assets, transport_graph=tg - ) + return LatticeSchema(metadata=meta, assets=assets, transport_graph=tg) def deserialize_lattice(model: LatticeSchema) -> Lattice: diff --git a/covalent/_shared_files/qelectron_utils.py b/covalent/_shared_files/qelectron_utils.py index 1c0a8c6f3..387722a2a 100644 --- a/covalent/_shared_files/qelectron_utils.py +++ b/covalent/_shared_files/qelectron_utils.py @@ -14,10 +14,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import importlib +import inspect +from typing import Any, Tuple -from covalent.quantum.qserver.database import Database +import cloudpickle +from pennylane._device import Device from .logger import app_log +from .pickling import _qml_mods_pickle + +_IMPORT_PATH_SEPARATOR = ":" def get_qelectron_db_path(dispatch_id: str, task_id: int): @@ -28,6 +35,8 @@ def get_qelectron_db_path(dispatch_id: str, task_id: int): AS WHERE THE USER'S TASK FUNCTION IS BEING RUN. """ + from covalent.quantum.qserver.database import Database + database = Database() db_path = database.get_db_path(dispatch_id=dispatch_id, node_id=task_id) @@ -38,3 +47,50 @@ def get_qelectron_db_path(dispatch_id: str, task_id: int): else: app_log.debug(f"Qelectron database not found for task {task_id}") return None + + +@_qml_mods_pickle +def cloudpickle_serialize(obj): + return cloudpickle.dumps(obj) + + +def cloudpickle_deserialize(obj): + return cloudpickle.loads(obj) + + +def select_first_executor(qnode, executors): + """Selects the first executor to run the qnode""" + return executors[0] + + +def get_import_path(obj) -> Tuple[str, str]: + """ + Determine the import path of an object. + """ + if module := inspect.getmodule(obj): + module_path = module.__name__ + class_name = obj.__name__ + return f"{module_path}{_IMPORT_PATH_SEPARATOR}{class_name}" + raise RuntimeError(f"Unable to determine import path for {obj}.") + + +def import_from_path(path: str) -> Any: + """ + Import a class from a path. + """ + module_path, class_name = path.split(_IMPORT_PATH_SEPARATOR) + module = importlib.import_module(module_path) + return getattr(module, class_name) + + +def get_original_shots(dev: Device): + """ + Recreate vector of shots if device has a shot vector. 
+ """ + if not dev.shot_vector: + return dev.shots + + shot_sequence = [] + for shots in dev.shot_vector: + shot_sequence.extend([shots.shots] * shots.copies) + return type(dev.shot_vector)(shot_sequence) diff --git a/covalent/_shared_files/qresult_utils.py b/covalent/_shared_files/qresult_utils.py index 9ffe3c182..efad64797 100644 --- a/covalent/_shared_files/qresult_utils.py +++ b/covalent/_shared_files/qresult_utils.py @@ -23,7 +23,7 @@ from pennylane.tape import QuantumTape from .._workflow.qdevice import QEDevice -from .utils import get_original_shots +from .qelectron_utils import get_original_shots def re_execute( diff --git a/covalent/_shared_files/schemas/electron.py b/covalent/_shared_files/schemas/electron.py index b245cc93d..c5da65e1d 100644 --- a/covalent/_shared_files/schemas/electron.py +++ b/covalent/_shared_files/schemas/electron.py @@ -19,7 +19,7 @@ from datetime import datetime from typing import Dict, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel from .asset import AssetSchema from .common import StatusEnum @@ -91,6 +91,8 @@ class ElectronAssets(BaseModel): # user dependent assets hooks: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class ElectronMetadata(BaseModel): task_group_id: int @@ -103,6 +105,8 @@ class ElectronMetadata(BaseModel): start_time: Optional[datetime] = None end_time: Optional[datetime] = None + _custom: Optional[Dict] = None + # For use by redispatch def reset(self): self.status = StatusEnum.NEW_OBJECT @@ -114,12 +118,3 @@ class ElectronSchema(BaseModel): id: int metadata: ElectronMetadata assets: ElectronAssets - custom_assets: Optional[Dict[str, AssetSchema]] = None - - @field_validator("custom_assets") - def check_custom_asset_keys(cls, v): - if v is not None: - for key in v: - if key in ASSET_FILENAME_MAP: - raise ValueError(f"Asset {key} conflicts with built-in key") - return v diff --git a/covalent/_shared_files/schemas/lattice.py b/covalent/_shared_files/schemas/lattice.py index 6a3e2bbf9..783b966ee 100644 --- a/covalent/_shared_files/schemas/lattice.py +++ b/covalent/_shared_files/schemas/lattice.py @@ -18,7 +18,7 @@ from typing import Dict, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel from .asset import AssetSchema from .transport_graph import TransportGraphSchema @@ -39,10 +39,6 @@ "workflow_function_string", "__doc__", "inputs", - "named_args", - "named_kwargs", - "cova_imports", - "lattice_imports", # user dependent assets "hooks", } @@ -83,14 +79,18 @@ class LatticeAssets(BaseModel): workflow_function_string: AssetSchema doc: AssetSchema # __doc__ inputs: AssetSchema - named_args: AssetSchema - named_kwargs: AssetSchema - cova_imports: AssetSchema - lattice_imports: AssetSchema + + # Deprecated + named_args: AssetSchema = AssetSchema(size=0) + named_kwargs: AssetSchema = AssetSchema(size=0) + cova_imports: AssetSchema = AssetSchema(size=0) + lattice_imports: AssetSchema = AssetSchema(size=0) # lattice.metadata hooks: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class LatticeMetadata(BaseModel): name: str # __name__ @@ -101,18 +101,11 @@ class LatticeMetadata(BaseModel): python_version: Optional[str] = None covalent_version: Optional[str] = None + _custom: Optional[Dict] = None + class LatticeSchema(BaseModel): metadata: LatticeMetadata assets: LatticeAssets - custom_assets: Optional[Dict[str, AssetSchema]] = None transport_graph: TransportGraphSchema - - @field_validator("custom_assets") - def 
check_custom_asset_keys(cls, v): - if v is not None: - for key in v: - if key in ASSET_FILENAME_MAP: - raise ValueError(f"Asset {key} conflicts with built-in key") - return v diff --git a/covalent/_shared_files/schemas/result.py b/covalent/_shared_files/schemas/result.py index fa771bf9b..3160c3708 100644 --- a/covalent/_shared_files/schemas/result.py +++ b/covalent/_shared_files/schemas/result.py @@ -17,7 +17,7 @@ """FastAPI models for /api/v1/resultv2 endpoints""" from datetime import datetime -from typing import Optional +from typing import Dict, Optional from pydantic import BaseModel @@ -54,6 +54,8 @@ class ResultMetadata(BaseModel): start_time: Optional[datetime] = None end_time: Optional[datetime] = None + _custom: Optional[Dict] = None + # For use by redispatch def reset(self): self.dispatch_id = "" @@ -67,6 +69,8 @@ class ResultAssets(BaseModel): result: AssetSchema error: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class ResultSchema(BaseModel): metadata: ResultMetadata diff --git a/covalent/_shared_files/utils.py b/covalent/_shared_files/utils.py index e7bd60368..f41899f24 100644 --- a/covalent/_shared_files/utils.py +++ b/covalent/_shared_files/utils.py @@ -16,19 +16,14 @@ """General utils for Covalent.""" -import importlib import inspect import shutil import socket from datetime import timedelta -from typing import Any, Callable, Dict, List, Tuple - -import cloudpickle -from pennylane._device import Device +from typing import Callable, Dict, List, Tuple from . import logger from .config import get_config -from .pickling import _qml_mods_pickle app_log = logger.app_log log_stack_info = logger.log_stack_info @@ -37,9 +32,6 @@ DEFAULT_UI_PORT = get_config("user_interface.port") -_IMPORT_PATH_SEPARATOR = ":" - - def get_ui_url(path): baseUrl = f"http://{DEFAULT_UI_ADDRESS}:{DEFAULT_UI_PORT}" return f"{baseUrl}{path}" @@ -264,49 +256,16 @@ def copy_file_locally(src_uri, dest_uri): shutil.copyfile(src_path, dest_path) -@_qml_mods_pickle -def cloudpickle_serialize(obj): - return cloudpickle.dumps(obj) - - -def cloudpickle_deserialize(obj): - return cloudpickle.loads(obj) - - -def select_first_executor(qnode, executors): - """Selects the first executor to run the qnode""" - return executors[0] - - -def get_import_path(obj) -> Tuple[str, str]: - """ - Determine the import path of an object. +def get_qelectron_db_path(dispatch_id: str, task_id: int): """ - module = inspect.getmodule(obj) - if module: - module_path = module.__name__ - class_name = obj.__name__ - return f"{module_path}{_IMPORT_PATH_SEPARATOR}{class_name}" - raise RuntimeError(f"Unable to determine import path for {obj}.") + Return the path to the Qelectron database for a given dispatch_id and task_id. - -def import_from_path(path: str) -> Any: - """ - Import a class from a path. + This is a proxy to qelectron_utils.get_qelectron_db_path() for removing qelectron dependency. """ - module_path, class_name = path.split(_IMPORT_PATH_SEPARATOR) - module = importlib.import_module(module_path) - return getattr(module, class_name) + try: + from .qelectron_utils import get_qelectron_db_path -def get_original_shots(dev: Device): - """ - Recreate vector of shots if device has a shot vector. 
- """ - if not dev.shot_vector: - return dev.shots - - shot_sequence = [] - for shots in dev.shot_vector: - shot_sequence.extend([shots.shots] * shots.copies) - return type(dev.shot_vector)(shot_sequence) + return get_qelectron_db_path(dispatch_id, task_id) + except ImportError: + return None diff --git a/covalent/_workflow/electron.py b/covalent/_workflow/electron.py index 12f18cbf5..e6a6e4648 100644 --- a/covalent/_workflow/electron.py +++ b/covalent/_workflow/electron.py @@ -96,6 +96,7 @@ def __init__( self.metadata = metadata self.task_group_id = task_group_id self._packing_tasks = packing_tasks + self._function_string = get_serialized_function_str(function) @property def packing_tasks(self) -> bool: @@ -428,6 +429,11 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: active_lattice.replace_electrons[name] = replacement_electron return bound_electron + # Avoid direct attribute access since that might trigger + # Electron.__getattr__ when executors build sublattices + # constructed with older versions of Covalent + function_string = self.__dict__.get("_function_string") + # Handle sublattices by injecting _build_sublattice_graph node if isinstance(self.function, Lattice): parent_metadata = active_lattice.metadata.copy() @@ -442,7 +448,6 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: ) name = sublattice_prefix + self.function.__name__ - function_string = get_serialized_function_str(self.function) bound_electron = sub_electron( self.function, json.dumps(parent_metadata), *args, **kwargs ) @@ -463,7 +468,7 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: name=self.function.__name__, function=self.function, metadata=self.metadata.copy(), - function_string=get_serialized_function_str(self.function), + function_string=function_string, task_group_id=self.task_group_id if self.packing_tasks else None, ) self.task_group_id = self.task_group_id if self.packing_tasks else self.node_id @@ -571,8 +576,8 @@ def _auto_list_node(*args, **kwargs): elif isinstance(param_value, dict): - def _auto_dict_node(*args, **kwargs): - return dict(kwargs) + def _auto_dict_node(keys, values): + return {keys[i]: values[i] for i in range(len(keys))} dict_electron = Electron( function=_auto_dict_node, @@ -580,7 +585,7 @@ def _auto_dict_node(*args, **kwargs): task_group_id=self.task_group_id, packing_tasks=True and active_lattice.task_packing, ) # Group the auto-generated node with the main node. - bound_electron = dict_electron(**param_value) + bound_electron = dict_electron(list(param_value.keys()), list(param_value.values())) transport_graph.set_node_value(bound_electron.node_id, "name", electron_dict_prefix) transport_graph.add_edge( dict_electron.node_id, @@ -608,32 +613,6 @@ def _auto_dict_node(*args, **kwargs): arg_index=arg_index, ) - def add_collection_node_to_graph(self, graph: "_TransportGraph", prefix: str) -> int: - """ - Adds the node to lattice's transport graph in the case - where a collection of electrons is passed as an argument - to another electron. 
- - Args: - graph: Transport graph of the lattice - prefix: Prefix of the node - - Returns: - node_id: Node id of the added node - """ - - new_metadata = encode_metadata(DEFAULT_METADATA_VALUES.copy()) - if "executor" in self.metadata: - new_metadata["executor"] = self.metadata["executor"] - new_metadata["executor_data"] = self.metadata["executor_data"] - - node_id = graph.add_node( - name=prefix, - function=to_decoded_electron_collection, - metadata=new_metadata, - function_string=get_serialized_function_str(to_decoded_electron_collection), - ) - return node_id def wait_for(self, electrons: Union["Electron", Iterable["Electron"]]): @@ -872,16 +851,6 @@ def wait(child, parents): return child -@electron -def to_decoded_electron_collection(**x): - """Interchanges order of serialize -> collection""" - collection = list(x.values())[0] - if isinstance(collection, list): - return TransportableObject.deserialize_list(collection) - elif isinstance(collection, dict): - return TransportableObject.deserialize_dict(collection) - - # Copied from runner.py def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kwargs): import os @@ -893,6 +862,8 @@ def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kw sub.build_graph(*args, **kwargs) + DISABLE_LEGACY_SUBLATTICES = os.environ.get("COVALENT_DISABLE_LEGACY_SUBLATTICES") == "1" + try: # Attempt multistage sublattice dispatch. For now we require # the executor to reach the Covalent server @@ -916,5 +887,7 @@ def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kw except Exception as ex: # Fall back to legacy sublattice handling + if DISABLE_LEGACY_SUBLATTICES: + raise print("Falling back to legacy sublattice handling") return sub.serialize_to_json() diff --git a/covalent/_workflow/lattice.py b/covalent/_workflow/lattice.py index 84f74f6b1..146b837d9 100644 --- a/covalent/_workflow/lattice.py +++ b/covalent/_workflow/lattice.py @@ -47,7 +47,7 @@ from ..executor import BaseExecutor from ..triggers import BaseTrigger -from .._shared_files.utils import get_imports, get_serialized_function_str +from .._shared_files.utils import get_serialized_function_str consumable_constraints = [] @@ -81,10 +81,7 @@ def __init__( self.__doc__ = self.workflow_function.__doc__ self.post_processing = False self.inputs = None - self.named_args = None - self.named_kwargs = None self.electron_outputs = {} - self.lattice_imports, self.cova_imports = get_imports(self.workflow_function) self.workflow_function = TransportableObject.make_transportable(self.workflow_function) @@ -105,8 +102,6 @@ def serialize_to_json(self) -> str: attributes["transport_graph"] = self.transport_graph.serialize_to_json() attributes["inputs"] = self.inputs.to_dict() - attributes["named_args"] = self.named_args.to_dict() - attributes["named_kwargs"] = self.named_kwargs.to_dict() attributes["electron_outputs"] = {} for node_name, output in self.electron_outputs.items(): @@ -121,8 +116,6 @@ def deserialize_from_json(json_data: str) -> None: for node_name, object_dict in attributes["electron_outputs"].items(): attributes["electron_outputs"][node_name] = TransportableObject.from_dict(object_dict) - attributes["named_kwargs"] = TransportableObject.from_dict(attributes["named_kwargs"]) - attributes["named_args"] = TransportableObject.from_dict(attributes["named_args"]) attributes["inputs"] = TransportableObject.from_dict(attributes["inputs"]) if attributes["transport_graph"]: @@ -209,9 +202,6 @@ def build_graph(self, *args, **kwargs) -> None: 
new_kwargs = dict(named_kwargs.items()) self.inputs = TransportableObject({"args": args, "kwargs": kwargs}) - self.named_args = TransportableObject(named_args) - self.named_kwargs = TransportableObject(named_kwargs) - self.lattice_imports, self.cova_imports = get_imports(workflow_function) # Set any lattice metadata not explicitly set by the user constraint_names = {"executor", "workflow_executor", "hooks"} diff --git a/covalent/_workflow/qelectron.py b/covalent/_workflow/qelectron.py index e0356723a..641e4a78e 100644 --- a/covalent/_workflow/qelectron.py +++ b/covalent/_workflow/qelectron.py @@ -19,7 +19,7 @@ import pennylane as qml -from .._shared_files.utils import get_import_path, get_original_shots +from .._shared_files.qelectron_utils import get_import_path, get_original_shots from ..quantum.qcluster import QCluster from ..quantum.qcluster.base import AsyncBaseQCluster, BaseQExecutor from ..quantum.qcluster.simulator import Simulator diff --git a/covalent/_workflow/qnode.py b/covalent/_workflow/qnode.py index 7e97fee34..f2019aaa0 100644 --- a/covalent/_workflow/qnode.py +++ b/covalent/_workflow/qnode.py @@ -25,9 +25,9 @@ from .._results_manager.qresult import QNodeFutureResult from .._shared_files import logger +from .._shared_files.qelectron_utils import get_original_shots from .._shared_files.qinfo import QElectronInfo, QNodeSpecs from .._shared_files.qresult_utils import re_execute -from .._shared_files.utils import get_original_shots from ..executor.qbase import BaseQExecutor from .qdevice import QEDevice diff --git a/covalent/cloud_resource_manager/core.py b/covalent/cloud_resource_manager/core.py index 4721fb70c..eb63bb604 100644 --- a/covalent/cloud_resource_manager/core.py +++ b/covalent/cloud_resource_manager/core.py @@ -24,7 +24,7 @@ from configparser import ConfigParser from pathlib import Path from types import ModuleType -from typing import Callable, Dict, List, Optional, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Union from covalent._shared_files.config import set_config from covalent.executor import _executor_manager @@ -425,6 +425,17 @@ def up(self, print_callback: Callable, dry_run: bool = True) -> None: # Setup terraform infra variables as passed by the user tf_vars_env_dict = os.environ.copy() + # Write the default values to the terraform.tfvars file + infra_settings = self.ExecutorInfraDefaults.schema()["properties"] + with open(tfvars_file, "w", encoding="utf-8") as f: + for key, value in infra_settings.items(): + if "default" in value: + tf_vars_env_dict[f"TF_VAR_{key}"] = value["default"] + + if value["default"]: + f.write(f'{key}={self._convert_to_tfvar(value["default"])}\n') + + # Overwrite the default values with the user passed values if self.executor_options: with open(tfvars_file, "w", encoding="utf-8") as f: for key, value in self.executor_options.items(): @@ -526,3 +537,29 @@ def status(self) -> None: # Run `terraform state list` return self._run_in_subprocess(cmd=tf_state, env_vars=self._terraform_log_env_vars) + + @staticmethod + def _convert_to_tfvar(value: Any) -> Any: + """ + Convert the value to a string that can be parsed as a terraform variable. 
+ + Args: + value: Value to convert + + Returns: + Converted value + + """ + if value is True: + return "true" + if value is False: + return "false" + if value is None: + return "null" + if isinstance(value, str): + return f'"{value}"' + if isinstance(value, Sequence): + values = [CloudResourceManager._convert_to_tfvar(v) for v in value] + return f"[{', '.join(values)}]" + + return str(value) diff --git a/covalent/executor/__init__.py b/covalent/executor/__init__.py index ffc139f96..be68b1182 100644 --- a/covalent/executor/__init__.py +++ b/covalent/executor/__init__.py @@ -31,7 +31,6 @@ from .._shared_files import logger from .._shared_files.config import get_config, update_config -from ..quantum import QCluster, Simulator from .base import BaseExecutor app_log = logger.app_log @@ -284,6 +283,8 @@ class _QExecutorManager: """ def __init__(self): + from ..quantum import QCluster, Simulator + # Dictionary mapping executor name to executor class self.executor_plugins_map: Dict[str, Any] = { "QCluster": QCluster, @@ -370,11 +371,12 @@ def validate_module(self, module_obj) -> None: _executor_manager = _ExecutorManager() -_qexecutor_manager = _QExecutorManager() - for name in _executor_manager.executor_plugins_map: plugin_class = _executor_manager.executor_plugins_map[name] globals()[plugin_class.__name__] = plugin_class -for qexecutor_cls in _qexecutor_manager.executor_plugins_map.values(): - globals()[qexecutor_cls.__name__] = qexecutor_cls +# Only creating the qexecutor manager if its requirements are installed +with contextlib.suppress(ImportError): + _qexecutor_manager = _QExecutorManager() + for qexecutor_cls in _qexecutor_manager.executor_plugins_map.values(): + globals()[qexecutor_cls.__name__] = qexecutor_cls diff --git a/covalent/executor/quantum_plugins/qiskit_plugin/qiskit_plugin.py b/covalent/executor/quantum_plugins/qiskit_plugin/qiskit_plugin.py index 43531ddef..a959122e3 100644 --- a/covalent/executor/quantum_plugins/qiskit_plugin/qiskit_plugin.py +++ b/covalent/executor/quantum_plugins/qiskit_plugin/qiskit_plugin.py @@ -25,7 +25,7 @@ from runtime_sampler import QiskitRuntimeSampler from covalent._shared_files.config import get_config -from covalent._shared_files.utils import import_from_path +from covalent._shared_files.qelectron_utils import import_from_path from covalent.executor.qbase import ( AsyncBaseQExecutor, BaseThreadPoolQExecutor, diff --git a/covalent/executor/utils/wrappers.py b/covalent/executor/utils/wrappers.py index 4cbd7fccb..8f7fd8256 100644 --- a/covalent/executor/utils/wrappers.py +++ b/covalent/executor/utils/wrappers.py @@ -29,7 +29,7 @@ import requests -from covalent._shared_files.qelectron_utils import get_qelectron_db_path +from covalent._shared_files.utils import get_qelectron_db_path from covalent._workflow.depsbash import DepsBash from covalent._workflow.depscall import RESERVED_RETVAL_KEY__FILES, DepsCall from covalent._workflow.depspip import DepsPip diff --git a/covalent/quantum/qclient/local_client.py b/covalent/quantum/qclient/local_client.py index 89bff7382..8ca46c99d 100644 --- a/covalent/quantum/qclient/local_client.py +++ b/covalent/quantum/qclient/local_client.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
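For reference, the `_convert_to_tfvar` helper added in `cloud_resource_manager/core.py` above renders the executor infra defaults as Terraform literals when writing the generated `terraform.tfvars`. A standalone restatement with illustrative checks (the asserts are examples, not tests from the changeset):

```python
from collections.abc import Sequence
from typing import Any

def convert_to_tfvar(value: Any) -> str:
    """Render a Python value as a Terraform variable literal."""
    if value is True:
        return "true"
    if value is False:
        return "false"
    if value is None:
        return "null"
    if isinstance(value, str):
        return f'"{value}"'
    if isinstance(value, Sequence):
        # Recurse over lists/tuples to build a Terraform list literal
        return f"[{', '.join(convert_to_tfvar(v) for v in value)}]"
    return str(value)

assert convert_to_tfvar("us-east-1") == '"us-east-1"'
assert convert_to_tfvar(True) == "true"
assert convert_to_tfvar([2, "a"]) == '[2, "a"]'
assert convert_to_tfvar(8) == "8"
```

Note that the `value is True` / `value is False` checks must precede the numeric fallthrough, since `bool` is a subclass of `int` and `str(True)` would otherwise leak Python's `True` into the tfvars file.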
-from ..._shared_files.utils import cloudpickle_deserialize, cloudpickle_serialize +from ..._shared_files.qelectron_utils import cloudpickle_deserialize, cloudpickle_serialize from ..qserver import LocalQServer from .base_client import BaseQClient diff --git a/covalent/quantum/qcluster/clusters.py b/covalent/quantum/qcluster/clusters.py index fe56aecbf..7aa1de314 100644 --- a/covalent/quantum/qcluster/clusters.py +++ b/covalent/quantum/qcluster/clusters.py @@ -17,7 +17,7 @@ import base64 from typing import Callable, Union -from ..._shared_files.utils import cloudpickle_deserialize, cloudpickle_serialize +from ..._shared_files.qelectron_utils import cloudpickle_deserialize, cloudpickle_serialize from .base import AsyncBaseQCluster, BaseQExecutor from .default_selectors import selector_map diff --git a/covalent/quantum/qserver/core.py b/covalent/quantum/qserver/core.py index f8efe09d8..2bc31c913 100644 --- a/covalent/quantum/qserver/core.py +++ b/covalent/quantum/qserver/core.py @@ -25,12 +25,12 @@ from pennylane.tape import QuantumScript -from ..._shared_files.qinfo import QElectronInfo, QNodeSpecs -from ..._shared_files.utils import ( +from ..._shared_files.qelectron_utils import ( cloudpickle_deserialize, cloudpickle_serialize, select_first_executor, ) +from ..._shared_files.qinfo import QElectronInfo, QNodeSpecs from ...executor.utils import get_context from ..qcluster.base import AsyncBaseQCluster, BaseQExecutor from .database import Database diff --git a/covalent/triggers/database_trigger.py b/covalent/triggers/database_trigger.py index d2a53402e..6baa0141c 100644 --- a/covalent/triggers/database_trigger.py +++ b/covalent/triggers/database_trigger.py @@ -19,9 +19,6 @@ from threading import Event from typing import List -from sqlalchemy import create_engine -from sqlalchemy.orm import Session - from covalent._shared_files import logger from .base import BaseTrigger @@ -88,6 +85,12 @@ def observe(self) -> None: where conditions are met or until stop has being called """ + # Since these modules are only used server-side, delay their + # imports to avoid introducing a sqlalchemy requirement to + # SDK-only installs + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + app_log.debug("Inside DatabaseTrigger's observe") event_count = 0 diff --git a/covalent_dispatcher/_cli/cli.py b/covalent_dispatcher/_cli/cli.py index f24f24aaf..f352305e6 100644 --- a/covalent_dispatcher/_cli/cli.py +++ b/covalent_dispatcher/_cli/cli.py @@ -25,18 +25,7 @@ from rich.console import Console from .groups import db, deploy -from .service import ( - cluster, - config, - logs, - migrate_legacy_result_object, - print_header, - purge, - restart, - start, - status, - stop, -) +from .service import cluster, config, logs, print_header, purge, restart, start, status, stop # Main entrypoint @@ -73,7 +62,6 @@ def cli(ctx: click.Context, version: bool) -> None: cli.add_command(cluster) cli.add_command(db) cli.add_command(config) -cli.add_command(migrate_legacy_result_object) cli.add_command(deploy) if __name__ == "__main__": diff --git a/covalent_dispatcher/_cli/groups/deploy_group.py b/covalent_dispatcher/_cli/groups/deploy_group.py index a9510c09f..ff8c06a23 100644 --- a/covalent_dispatcher/_cli/groups/deploy_group.py +++ b/covalent_dispatcher/_cli/groups/deploy_group.py @@ -24,7 +24,6 @@ from pathlib import Path from typing import Callable, Dict, Tuple -import boto3 import click from rich.console import Console from rich.table import Table @@ -176,12 +175,20 @@ def up(executor_name: str, vars: 
Dict, help: bool, dry_run: bool, verbose: bool) $ covalent deploy up awslambda --verbose --region=us-east-1 --instance-type=t2.micro """ + cmd_options = {key[2:]: value for key, value in (var.split("=") for var in vars)} - if msg := validate_args(cmd_options): - # Message is not None, so there was an error. - click.echo(msg) + + try: + crm = get_crm_object(executor_name, cmd_options) + except (KeyError, AttributeError): + click.echo( + click.style( + f"Warning: '{executor_name}' is not a valid executor for deployment.", + fg="yellow", + ) + ) sys.exit(1) - crm = get_crm_object(executor_name, cmd_options) + if help: click.echo(Console().print(get_up_help_table(crm))) sys.exit(0) @@ -212,7 +219,18 @@ def down(executor_name: str, verbose: bool) -> None: $ covalent deploy down ecs --verbose """ - crm = get_crm_object(executor_name) + + try: + crm = get_crm_object(executor_name) + except (KeyError, AttributeError): + click.echo( + click.style( + f"Warning: '{executor_name}' is not a valid executor for deployment.", + fg="yellow", + ) + ) + sys.exit(1) + _command = partial(crm.down) _run_command_and_show_output(_command, "Destroying resources...", verbose=verbose) @@ -247,7 +265,7 @@ def status(executor_names: Tuple[str]) -> None: for name in _executor_manager.executor_plugins_map if name not in ["dask", "local", "remote_executor"] ] - click.echo(f"Executors: {', '.join(executor_names)}") + click.echo(f"Installed executors: {', '.join(executor_names)}") table = Table() table.add_column("Executor", justify="center") @@ -260,7 +278,9 @@ def status(executor_names: Tuple[str]) -> None: crm = get_crm_object(executor_name) crm_status = crm.status() table.add_row(executor_name, crm_status, description[crm_status]) - except KeyError: + except (KeyError, AttributeError): + # Added the AttributeError here as well in case the executor does not + # have the ExecutorPluginDefaults or ExecutorInfraDefaults classes. invalid_executor_names.append(executor_name) click.echo(Console().print(table)) @@ -268,23 +288,7 @@ def status(executor_names: Tuple[str]) -> None: if invalid_executor_names: click.echo( click.style( - f"Warning: {', '.join(invalid_executor_names)} are not valid executors.", + f"Warning: Invalid executors for deployment -> '{', '.join(invalid_executor_names)}'", fg="yellow", ) ) - - -def validate_args(args: dict): - message = None - if len(args) == 0: - return message - if "region" in args and args["region"] != "": - if not validate_region(args["region"]): - return f"Unable to find the provided region: {args['region']}" - - -def validate_region(region_name: str): - ec2_client = boto3.client("ec2") - response = ec2_client.describe_regions() - exists = region_name in [item["RegionName"] for item in response["Regions"]] - return exists diff --git a/covalent_dispatcher/_cli/migrate.py b/covalent_dispatcher/_cli/migrate.py deleted file mode 100644 index 032aafbf0..000000000 --- a/covalent_dispatcher/_cli/migrate.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Utils for migrating legacy (0.110-era) result object to a modern result object.""" - -import pickle - -from covalent._results_manager import Result -from covalent._shared_files import logger -from covalent._shared_files.defaults import ( - attr_prefix, - electron_dict_prefix, - electron_list_prefix, - generator_prefix, - parameter_prefix, - subscript_prefix, -) -from covalent._shared_files.utils import get_named_params -from covalent._workflow.electron import to_decoded_electron_collection -from covalent._workflow.lattice import Lattice -from covalent._workflow.transport import TransportableObject, _TransportGraph, encode_metadata - -from .._db import update - -app_log = logger.app_log -log_stack_info = logger.log_stack_info - - -def process_node(node: dict) -> dict: - """Convert a node from a 0.110.2-vintage transport graph - - Args: - node: dictionary of node attributes - - Returns: - the converted node attributes - """ - - if "metadata" in node: - node["metadata"] = encode_metadata(node["metadata"]) - if "deps" not in node["metadata"]: - node["metadata"]["deps"] = {} - if "call_before" not in node["metadata"]: - node["metadata"]["call_before"] = [] - if "call_after" not in node["metadata"]: - node["metadata"]["call_after"] = [] - - node_name = node["name"] - - # encode output, remove "attribute_name", strip "attr_prefix" from name - if node_name.startswith(attr_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "attribute_name" in node: - del node["attribute_name"] - new_node_name = node_name.replace(attr_prefix, "") - node["name"] = new_node_name - - # encode output, remove "key", strip "generator_prefix" from name - elif node_name.startswith(generator_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "key" in node: - del node["key"] - new_node_name = node_name.replace(generator_prefix, "") - node["name"] = new_node_name - - # encode output, remove "key", strip "subscript_prefix" from name - elif node_name.startswith(subscript_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "key" in node: - del node["key"] - new_node_name = node_name.replace(subscript_prefix, "") - node["name"] = new_node_name - - # Replace function for collection nodes - elif node_name.startswith(electron_list_prefix) or node_name.startswith(electron_dict_prefix): - node["function"] = TransportableObject(to_decoded_electron_collection) - - # Encode "value" and "output" for parameter nodes - elif node_name.startswith(parameter_prefix): - node["value"] = TransportableObject.make_transportable(node["value"]) - node["output"] = TransportableObject.make_transportable(node["output"]) - - # Function nodes: encode output and sublattice_result - else: - node["output"] = TransportableObject.make_transportable(node["output"]) - if "sublattice_result" in node: - if node["sublattice_result"] is not None: - node["sublattice_result"] = process_result_object(node["sublattice_result"]) - - return node - - -def process_transport_graph(tg: _TransportGraph) -> _TransportGraph: - """Convert a 0.110.2-vintage transport graph to a modern transport graph - - Args: - tg: old Transport Graph - - Returns: - the modernized Transport Graph - """ - tg_new = _TransportGraph() - g = tg.get_internal_graph_copy() - for node_id in g.nodes: - app_log.debug(f"Processing node {node_id}") - process_node(g.nodes[node_id]) - - if 
tg.lattice_metadata: - tg.lattice_metadata = encode_metadata(tg.lattice_metadata) - - tg_new._graph = g - return tg_new - - -def process_lattice(lattice: Lattice) -> Lattice: - """Convert a "legacy" (0.110.2) Lattice to a modern Lattice - - Args: - lattice: old lattice - - Returns: - the modernized lattice - """ - - workflow_function = lattice.workflow_function - lattice.workflow_function = TransportableObject.make_transportable(workflow_function) - inputs = {"args": lattice.args, "kwargs": lattice.kwargs} - lattice.inputs = TransportableObject(inputs) - - workflow_function = lattice.workflow_function.get_deserialized() - - named_args, named_kwargs = get_named_params(workflow_function, lattice.args, lattice.kwargs) - lattice.named_args = TransportableObject(named_args) - lattice.named_kwargs = TransportableObject(named_kwargs) - - metadata = lattice.metadata - - if "workflow_executor" not in metadata: - metadata["workflow_executor"] = "local" - - metadata = encode_metadata(metadata) - lattice.metadata = metadata - lattice.metadata["deps"] = {} - lattice.metadata["call_before"] = [] - lattice.metadata["call_after"] = [] - - lattice.transport_graph = process_transport_graph(lattice.transport_graph) - lattice.transport_graph.lattice_metadata = lattice.metadata - app_log.debug("Processed transport graph") - - # Delete raw inputs - del lattice.__dict__["args"] - del lattice.__dict__["kwargs"] - - return lattice - - -def process_result_object(result_object: Result) -> Result: - """Convert a "legacy" (0.110.2) Result object to a modern Result object - - Args: - result_object: the old Result object - - Returns: - the modernized result object - """ - - app_log.debug(f"Processing result object for dispatch {result_object.dispatch_id}") - process_lattice(result_object._lattice) - app_log.debug("Processed lattice") - - result_object._result = TransportableObject.make_transportable(result_object._result) - tg = result_object.lattice.transport_graph - for n in tg._graph.nodes: - tg.dirty_nodes.append(n) - - del result_object.__dict__["_inputs"] - return result_object - - -def migrate_pickled_result_object(path: str) -> None: - """Save legacy (0.110.2) result pickle file to a DataStore. - - This first transforms certain legacy properties of the result - object and then persists the result object to the datastore. 
- - Args: - path: path of the `result.pkl` file - """ - - with open(path, "rb") as f: - result_object = pickle.load(f) - - process_result_object(result_object) - update.persist(result_object) diff --git a/covalent_dispatcher/_cli/service.py b/covalent_dispatcher/_cli/service.py index faa14617d..73d63da0b 100644 --- a/covalent_dispatcher/_cli/service.py +++ b/covalent_dispatcher/_cli/service.py @@ -27,6 +27,7 @@ import sys import time import traceback +import warnings from pathlib import Path from subprocess import DEVNULL, Popen from typing import Optional @@ -50,11 +51,11 @@ from rich.syntax import Syntax from rich.table import Table from rich.text import Text +from sqlalchemy import exc as sa_exc from covalent._shared_files.config import ConfigManager, get_config, reload_config, set_config from .._db.datastore import DataStore -from .migrate import migrate_pickled_result_object UI_PIDFILE = get_config("dispatcher.cache_dir") + "/ui.pid" UI_LOGFILE = get_config("user_interface.log_dir") + "/covalent_ui.log" @@ -67,6 +68,9 @@ ZOMBIE_PROCESS_STATUS_MSG = "Covalent server is unhealthy: Process is in zombie status" STOPPED_PROCESS_STATUS_MSG = "Covalent server is unhealthy: Process is in stopped status" +# Ignore SQLAlchemy warnings +warnings.simplefilter("ignore", category=sa_exc.SAWarning) + def print_header(console): branding_title = Text("Covalent", style="bold blue") @@ -782,17 +786,6 @@ def logs() -> None: ) -@click.command() -@click.argument("result_pickle_path") -def migrate_legacy_result_object(result_pickle_path) -> None: - """Migrate a legacy result object - - Example: `covalent migrate-legacy-result-object result.pkl` - """ - - migrate_pickled_result_object(result_pickle_path) - - # Cluster CLI handlers (client side wrappers for the async handlers exposed # in the dask cluster process) async def _get_cluster_status(uri: str): diff --git a/covalent_dispatcher/_core/dispatcher.py b/covalent_dispatcher/_core/dispatcher.py index e17547969..b0ecd27b6 100644 --- a/covalent_dispatcher/_core/dispatcher.py +++ b/covalent_dispatcher/_core/dispatcher.py @@ -183,6 +183,7 @@ async def _submit_task_group(dispatch_id: str, sorted_nodes: List[int], task_gro app_log.debug("8A: Update node success (run_planned_workflow).") else: + # Nodes whose values have already been resolved known_nodes = [] # Skip the group if all task outputs can be reused from a @@ -196,6 +197,8 @@ async def _submit_task_group(dispatch_id: str, sorted_nodes: List[int], task_gro # Gather inputs for each task and send the task spec sequence to the runner task_specs = [] + sorted_nodes_set = set(sorted_nodes) + for node_id in sorted_nodes: app_log.debug(f"Gathering inputs for task {node_id} (run_planned_workflow).") @@ -214,8 +217,16 @@ async def _submit_task_group(dispatch_id: str, sorted_nodes: List[int], task_gro "args_ids": abs_task_input["args"], "kwargs_ids": abs_task_input["kwargs"], } - known_nodes += abs_task_input["args"] - known_nodes += list(abs_task_input["kwargs"].values()) + # Task inputs that don't belong to the task group have already been resolved + external_task_args = filter( + lambda x: x not in sorted_nodes_set, abs_task_input["args"] + ) + known_nodes.extend(external_task_args) + external_task_kwargs = filter( + lambda x: x not in sorted_nodes_set, abs_task_input["kwargs"].values() + ) + known_nodes.extend(external_task_kwargs) + task_specs.append(task_spec) app_log.debug( diff --git a/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py b/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py index 
676d0b68c..9871caefb 100644 --- a/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py +++ b/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py @@ -28,10 +28,6 @@ "name", "doc", "inputs", - "named_args", - "named_kwargs", - "cova_imports", - "lattice_imports", } METADATA_KEYS = lattice.LATTICE_METADATA_KEYS.copy() @@ -68,10 +64,6 @@ "workflow_function_string": "function_string_filename", "doc": "docstring_filename", "inputs": "inputs_filename", - "named_args": "named_args_filename", - "named_kwargs": "named_kwargs_filename", - "cova_imports": "cova_imports_filename", - "lattice_imports": "lattice_imports_filename", "executor_data": "executor_data_filename", "workflow_executor_data": "workflow_executor_data_filename", "hooks": "hooks_filename", diff --git a/covalent_dispatcher/_dal/importers/electron.py b/covalent_dispatcher/_dal/importers/electron.py index d4b5047c5..1f3ca51fc 100644 --- a/covalent_dispatcher/_dal/importers/electron.py +++ b/covalent_dispatcher/_dal/importers/electron.py @@ -133,6 +133,10 @@ def import_electron_assets( asset_recs = {} for asset_key, asset in e.assets: + # Register these later + if asset_key == "_custom": + continue + node_storage_path, object_key = object_store.get_uri_components( dispatch_id, e.id, @@ -157,8 +161,8 @@ def import_electron_assets( asset.remote_uri = f"file://{local_uri}" # Register custom assets - if e.custom_assets: - for asset_key, asset in e.custom_assets.items(): + if e.assets._custom: + for asset_key, asset in e.assets._custom.items(): object_key = f"{asset_key}.data" local_uri = os.path.join(node_storage_path, object_key) diff --git a/covalent_dispatcher/_dal/importers/lattice.py b/covalent_dispatcher/_dal/importers/lattice.py index a14938f98..55fa50925 100644 --- a/covalent_dispatcher/_dal/importers/lattice.py +++ b/covalent_dispatcher/_dal/importers/lattice.py @@ -24,16 +24,12 @@ from covalent._shared_files.config import get_config from covalent._shared_files.schemas.lattice import ( - LATTICE_COVA_IMPORTS_FILENAME, LATTICE_DOCSTRING_FILENAME, LATTICE_ERROR_FILENAME, LATTICE_FUNCTION_FILENAME, LATTICE_FUNCTION_STRING_FILENAME, LATTICE_HOOKS_FILENAME, LATTICE_INPUTS_FILENAME, - LATTICE_LATTICE_IMPORTS_FILENAME, - LATTICE_NAMED_ARGS_FILENAME, - LATTICE_NAMED_KWARGS_FILENAME, LATTICE_RESULTS_FILENAME, LATTICE_STORAGE_TYPE, LatticeAssets, @@ -71,12 +67,8 @@ def _get_lattice_meta(lat: LatticeSchema, storage_path) -> dict: "function_string_filename": LATTICE_FUNCTION_STRING_FILENAME, "error_filename": LATTICE_ERROR_FILENAME, "inputs_filename": LATTICE_INPUTS_FILENAME, - "named_args_filename": LATTICE_NAMED_ARGS_FILENAME, - "named_kwargs_filename": LATTICE_NAMED_KWARGS_FILENAME, "results_filename": LATTICE_RESULTS_FILENAME, "hooks_filename": LATTICE_HOOKS_FILENAME, - "cova_imports_filename": LATTICE_COVA_IMPORTS_FILENAME, - "lattice_imports_filename": LATTICE_LATTICE_IMPORTS_FILENAME, } kwargs.update(legacy_kwargs) return kwargs @@ -94,6 +86,10 @@ def import_lattice_assets( # Register built-in assets for asset_key, asset in lat.assets: + # Deal with these later + if asset_key == "_custom": + continue + storage_path, object_key = object_store.get_uri_components( dispatch_id=dispatch_id, node_id=None, @@ -118,8 +114,8 @@ def import_lattice_assets( asset.remote_uri = f"file://{local_uri}" # Register custom assets - if lat.custom_assets: - for asset_key, asset in lat.custom_assets.items(): + if lat.assets._custom: + for asset_key, asset in lat.assets._custom.items(): object_key = f"{asset_key}.data" local_uri = 
os.path.join(storage_path, object_key) diff --git a/covalent_dispatcher/_dal/importers/result.py b/covalent_dispatcher/_dal/importers/result.py index 395516b86..7e4bd36f9 100644 --- a/covalent_dispatcher/_dal/importers/result.py +++ b/covalent_dispatcher/_dal/importers/result.py @@ -72,7 +72,6 @@ def import_result( # Main case: insert new lattice, electron, edge, and job records storage_path = os.path.join(base_path, dispatch_id) - os.makedirs(storage_path) lattice_record_kwargs = _get_result_meta(res, storage_path, electron_id) lattice_record_kwargs.update(_get_lattice_meta(res.lattice, storage_path)) @@ -143,6 +142,7 @@ def _connect_result_to_electron( fields={"id", "cancel_requested"}, equality_filters={"id": parent_electron_record.job_id}, membership_filters={}, + for_update=True, )[0] cancel_requested = parent_job_record.cancel_requested diff --git a/covalent_dispatcher/_dal/importers/tg.py b/covalent_dispatcher/_dal/importers/tg.py index 468abdadf..c67cc34b9 100644 --- a/covalent_dispatcher/_dal/importers/tg.py +++ b/covalent_dispatcher/_dal/importers/tg.py @@ -51,7 +51,9 @@ def import_transport_graph( # Propagate parent electron id's `cancel_requested` property to the sublattice electrons if electron_id is not None: parent_e_record = Electron.meta_type.get_by_primary_key(session, electron_id) - job_record = Job.get_by_primary_key(session=session, primary_key=parent_e_record.job_id) + job_record = Job.get_by_primary_key( + session=session, primary_key=parent_e_record.job_id, for_update=True + ) cancel_requested = job_record.cancel_requested else: cancel_requested = False diff --git a/covalent_dispatcher/_db/dispatchdb.py b/covalent_dispatcher/_db/dispatchdb.py index 621022777..e78a02d6d 100644 --- a/covalent_dispatcher/_db/dispatchdb.py +++ b/covalent_dispatcher/_db/dispatchdb.py @@ -20,7 +20,6 @@ from datetime import datetime import networkx as nx -import simplejson import covalent.executor as covalent_executor from covalent._shared_files import logger @@ -125,38 +124,6 @@ def result_encoder(obj): return str(obj) -def encode_result(result_obj): - lattice = result_obj.lattice - - result_string = result_obj.encoded_result.json - if not result_string: - result_string = result_obj.encoded_result.object_string - - named_args = {k: v.object_string for k, v in lattice.named_args.items()} - named_kwargs = {k: v.object_string for k, v in lattice.named_kwargs.items()} - result_dict = { - "dispatch_id": result_obj.dispatch_id, - "status": result_obj.status, - "result": result_string, - "start_time": result_obj.start_time, - "end_time": result_obj.end_time, - "results_dir": result_obj.results_dir, - "error": result_obj.error, - "lattice": { - "function_string": lattice.workflow_function_string, - "doc": lattice.__doc__, - "name": lattice.__name__, - "inputs": encode_dict({**named_args, **named_kwargs}), - "metadata": extract_metadata(lattice.metadata), - }, - "graph": extract_graph(result_obj.lattice.transport_graph._graph), - } - - jsonified_result = simplejson.dumps(result_dict, default=result_encoder, ignore_nan=True) - - return jsonified_result - - class DispatchDB: """ Wrapper for the database of workflows.
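On the `_custom` attributes that replace the validated `custom_assets` field in the schemas above: underscore-prefixed names are private attributes in pydantic, so they stay out of validation, serialization, and field iteration, and the explicit `asset_key == "_custom"` guard in the importers keeps the asset loops safe either way. A minimal sketch, assuming pydantic v2 semantics and deliberately simplified classes:

```python
from typing import Dict, Optional

from pydantic import BaseModel

class AssetSchema(BaseModel):
    size: int = 0

class ElectronAssets(BaseModel):
    hooks: AssetSchema
    _custom: Optional[Dict[str, AssetSchema]] = None  # private attribute

assets = ElectronAssets(hooks=AssetSchema(size=1))
assets._custom = {"my_asset": AssetSchema(size=2)}

print(assets.model_dump())      # {'hooks': {'size': 1}} -- no '_custom' key
print([k for k, _ in assets])   # ['hooks'] -- iteration skips private attrs
```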
diff --git a/covalent_dispatcher/_db/models.py b/covalent_dispatcher/_db/models.py index 7e0521c35..e61f725ef 100644 --- a/covalent_dispatcher/_db/models.py +++ b/covalent_dispatcher/_db/models.py @@ -92,10 +92,10 @@ class Lattice(Base): # Name of the file containing the serialized input data inputs_filename = Column(Text) - # Name of the file containing the serialized named args + # DEPRECATED: Name of the file containing the serialized named args named_args_filename = Column(Text) - # Name of the file containing the serialized named kwargs + # DEPRECATED: Name of the file containing the serialized named kwargs named_kwargs_filename = Column(Text) # name of the file containing the serialized output @@ -104,10 +104,10 @@ class Lattice(Base): # Name of the file containing the default electron hooks hooks_filename = Column(Text) - # Name of the file containing the set of cova imports + # DEPRECATED: Name of the file containing the set of cova imports cova_imports_filename = Column(Text) - # Name of the file containing the set of lattice imports + # DEPRECATED: Name of the file containing the set of lattice imports lattice_imports_filename = Column(Text) # Results directory (will be deprecated soon) diff --git a/covalent_dispatcher/_db/upsert.py b/covalent_dispatcher/_db/upsert.py index 3bd7f0ca7..70ef99a45 100644 --- a/covalent_dispatcher/_db/upsert.py +++ b/covalent_dispatcher/_db/upsert.py @@ -57,12 +57,8 @@ LATTICE_DOCSTRING_FILENAME = LATTICE_FILENAMES["doc"] LATTICE_ERROR_FILENAME = LATTICE_FILENAMES["error"] LATTICE_INPUTS_FILENAME = LATTICE_FILENAMES["inputs"] -LATTICE_NAMED_ARGS_FILENAME = LATTICE_FILENAMES["named_args"] -LATTICE_NAMED_KWARGS_FILENAME = LATTICE_FILENAMES["named_kwargs"] LATTICE_RESULTS_FILENAME = LATTICE_FILENAMES["result"] LATTICE_HOOKS_FILENAME = LATTICE_FILENAMES["hooks"] -LATTICE_COVA_IMPORTS_FILENAME = LATTICE_FILENAMES["cova_imports"] -LATTICE_LATTICE_IMPORTS_FILENAME = LATTICE_FILENAMES["lattice_imports"] LATTICE_STORAGE_TYPE = "file" CUSTOM_ASSETS_FIELD = "custom_asset_keys" @@ -108,12 +104,8 @@ def _lattice_data(session: Session, result: Result, electron_id: int = None) -> ("doc", LATTICE_DOCSTRING_FILENAME, result.lattice.__doc__), ("error", LATTICE_ERROR_FILENAME, result.error), ("inputs", LATTICE_INPUTS_FILENAME, result.lattice.inputs), - ("named_args", LATTICE_NAMED_ARGS_FILENAME, result.lattice.named_args), - ("named_kwargs", LATTICE_NAMED_KWARGS_FILENAME, result.lattice.named_kwargs), ("result", LATTICE_RESULTS_FILENAME, result._result), ("hooks", LATTICE_HOOKS_FILENAME, result.lattice.metadata["hooks"]), - ("cova_imports", LATTICE_COVA_IMPORTS_FILENAME, result.lattice.cova_imports), - ("lattice_imports", LATTICE_LATTICE_IMPORTS_FILENAME, result.lattice.lattice_imports), ]: digest, size = local_store.store_file(data_storage_path, filename, data) asset_record_kwargs = { @@ -161,12 +153,8 @@ def _lattice_data(session: Session, result: Result, electron_id: int = None) -> "workflow_executor_data": json.dumps(result.lattice.metadata["workflow_executor_data"]), "error_filename": LATTICE_ERROR_FILENAME, "inputs_filename": LATTICE_INPUTS_FILENAME, - "named_args_filename": LATTICE_NAMED_ARGS_FILENAME, - "named_kwargs_filename": LATTICE_NAMED_KWARGS_FILENAME, "results_filename": LATTICE_RESULTS_FILENAME, "hooks_filename": LATTICE_HOOKS_FILENAME, - "cova_imports_filename": LATTICE_COVA_IMPORTS_FILENAME, - "lattice_imports_filename": LATTICE_LATTICE_IMPORTS_FILENAME, "results_dir": results_dir, "root_dispatch_id": result.root_dispatch_id, "python_version": 
result.lattice.python_version, diff --git a/covalent_dispatcher/_db/write_result_to_db.py b/covalent_dispatcher/_db/write_result_to_db.py index 9d928c1ec..08da952ca 100644 --- a/covalent_dispatcher/_db/write_result_to_db.py +++ b/covalent_dispatcher/_db/write_result_to_db.py @@ -95,12 +95,8 @@ def transaction_insert_lattices_data( workflow_executor_data: str, error_filename: str, inputs_filename: str, - named_args_filename: str, - named_kwargs_filename: str, results_filename: str, hooks_filename: str, - cova_imports_filename: str, - lattice_imports_filename: str, results_dir: str, root_dispatch_id: str, created_at: dt, @@ -133,12 +129,8 @@ def transaction_insert_lattices_data( workflow_executor_data=workflow_executor_data, error_filename=error_filename, inputs_filename=inputs_filename, - named_args_filename=named_args_filename, - named_kwargs_filename=named_kwargs_filename, results_filename=results_filename, hooks_filename=hooks_filename, - cova_imports_filename=cova_imports_filename, - lattice_imports_filename=lattice_imports_filename, results_dir=results_dir, root_dispatch_id=root_dispatch_id, is_active=True, diff --git a/covalent_dispatcher/_service/app.py b/covalent_dispatcher/_service/app.py index 9a9c7d460..03e71186d 100644 --- a/covalent_dispatcher/_service/app.py +++ b/covalent_dispatcher/_service/app.py @@ -191,7 +191,7 @@ async def register(manifest: ResultSchema) -> ResultSchema: ) from e -@router.post("/dispatches/{dispatch_id}/subdispatches", status_code=201) +@router.post("/dispatches/{dispatch_id}/sublattices", status_code=201) async def register_subdispatch( manifest: ResultSchema, dispatch_id: str, diff --git a/covalent_dispatcher/_service/models.py b/covalent_dispatcher/_service/models.py index 2d2f7db10..18a33a071 100644 --- a/covalent_dispatcher/_service/models.py +++ b/covalent_dispatcher/_service/models.py @@ -41,11 +41,7 @@ class LatticeAssetKey(str, Enum): workflow_function_string = "workflow_function_string" doc = "doc" inputs = "inputs" - named_args = "named_args" - named_kwargs = "named_kwargs" hooks = "hooks" - cova_imports = "cova_imports" - lattice_imports = "lattice_imports" class ElectronAssetKey(str, Enum): diff --git a/covalent_ui/api/v1/data_layer/electron_dal.py b/covalent_ui/api/v1/data_layer/electron_dal.py index 5f85c965a..9e10ee154 100644 --- a/covalent_ui/api/v1/data_layer/electron_dal.py +++ b/covalent_ui/api/v1/data_layer/electron_dal.py @@ -28,7 +28,6 @@ from covalent._results_manager.results_manager import get_result from covalent._shared_files import logger from covalent._shared_files.config import get_config -from covalent.quantum.qserver.database import Database from covalent_dispatcher._core.execution import _get_task_inputs as get_task_inputs from covalent_ui.api.v1.data_layer.lattice_dal import Lattices from covalent_ui.api.v1.database.schema.electron import Electron @@ -322,11 +321,17 @@ def get_electron_inputs(self, dispatch_id: uuid.UUID, electron_id: int) -> str: def _get_qelectron_db_dict(self, dispatch_id: str, node_id: int) -> dict: """Return the QElectron DB for a given node.""" - electron = self.get_electrons_id(dispatch_id, node_id) + try: + from covalent.quantum.qserver.database import Database - database = Database(electron.storage_path) - qelectron_db_dict = database.get_db_dict( - dispatch_id=dispatch_id, node_id=node_id, direct_path=True - ) + electron = self.get_electrons_id(dispatch_id, node_id) + + database = Database(electron.storage_path) + qelectron_db_dict = database.get_db_dict( + dispatch_id=dispatch_id, 
node_id=node_id, direct_path=True + ) - return qelectron_db_dict + return qelectron_db_dict + except ImportError: + app_log.debug("QElectron not installed.") + return {}
diff --git a/covalent_ui/app.py b/covalent_ui/app.py index bf1d473eb..aa09d8854 100644 --- a/covalent_ui/app.py +++ b/covalent_ui/app.py @@ -133,7 +133,6 @@ def get_home(request: Request, rest_of_path: str): app_name, host=host, port=port, - debug=DEBUG, reload=RELOAD, log_config=log_config(), )
diff --git a/covalent_ui/result_webhook.py b/covalent_ui/result_webhook.py index 3caf03c10..f5d311421 100644 --- a/covalent_ui/result_webhook.py +++ b/covalent_ui/result_webhook.py @@ -22,7 +22,7 @@ import covalent_ui.app as ui_server from covalent._results_manager import Result from covalent._shared_files import logger -from covalent._shared_files.utils import get_ui_url +from covalent._shared_files.utils import get_named_params, get_ui_url from covalent_dispatcher._db.dispatchdb import encode_dict, extract_graph, extract_metadata app_log = logger.app_log @@ -78,8 +78,11 @@ def send_draw_request(lattice) -> None: graph = lattice.transport_graph.get_internal_graph_copy() - named_args = lattice.named_args.get_deserialized() - named_kwargs = lattice.named_kwargs.get_deserialized() + inputs = lattice.inputs.get_deserialized() + fn = lattice.workflow_function.get_deserialized() + args = inputs["args"] + kwargs = inputs["kwargs"] + named_args, named_kwargs = get_named_params(fn, args, kwargs) draw_request = json.dumps( {
[New static assets added under doc/source/_static/; the SVG markup and binary payloads did not survive extraction, so only the file list is preserved: abstract_infra.png, ai.svg, ai_tutorial.svg, aws.svg, azure.svg, banner_executor.gif, cloud_hosted.svg, cloud_hpc.svg, code_snippet.svg, computing.svg, concepts.svg, concepts_of_covalent.svg, copy.svg, covalent_work.svg, dask.svg, deployment.svg, development.svg, documentation.svg, examples.svg, executor.gif, executors.svg, executors_ship.png, getting_started.svg, google.svg, high_compute.svg, hpc.svg, ibmq.svg, kubernetes.svg, local-laptop.svg, many_more.svg, mnist.svg, mnist_tutorial.svg, onprem_hosted.svg, orchestration.svg, quantum_tutorial.svg, readmeVid-gif.gif, readmeVid.mp4, readme_executor.svg, readme_hero.svg, self.svg, serverless-illustration.png, slack.svg, slurm.svg, tutorial_heading.svg (visible text: "Tutorials"), what_is_covalent.svg, work.svg, workflow.svg, workflows.svg]
diff --git a/doc/source/api/cli.rst b/doc/source/api/cli.rst index 1dd83f117..22b5c1fea 100644 --- a/doc/source/api/cli.rst +++ b/doc/source/api/cli.rst @@ -5,5 +5,5 @@ The command line interface (CLI) tool is used to manage the Covalent server. .. click:: covalent_dispatcher._cli.cli:cli :prog: covalent - :commands: start,stop,restart,status,purge,logs,db,migrate-legacy-result-object,cluster + :commands: start,stop,restart,status,purge,logs,db,cluster :nested: full
diff --git a/doc/source/api/executors/slurm.rst b/doc/source/api/executors/slurm.rst index e9df75496..1412cd7d8 100644 --- a/doc/source/api/executors/slurm.rst +++ b/doc/source/api/executors/slurm.rst @@ -133,27 +133,6 @@ Here the corresponding submit script contains the following commands: srun --ntasks-per-node 1 dcgmi profile --resume +.. note:: -sshproxy -------- - -Some users may need two-factor authentication (2FA) to connect to a cluster. This plugin supports one form of 2FA using the `sshproxy `_ service developed by NERSC. When this plugin is configured to support ``sshproxy``, the user's SSH key and certificate will be refreshed automatically by Covalent if either it does not exist or it is expired. We assume that the user has already `configured 2FA `_, used the ``sshproxy`` service on the command line without issue, and added the executable to their ``PATH``. Note that this plugin assumes the script is called ``sshproxy``, not ``sshproxy.sh``. Further note that using ``sshproxy`` within Covalent is not required; a user can still run it manually and provide ``ssh_key_file`` and ``cert_file`` in the plugin constructor. - -In order to enable ``sshproxy`` in this plugin, add the following block to your Covalent configuration while the server is stopped: - -.. code:: bash - - [executors.slurm.sshproxy] - hosts = [ "perlmutter-p1.nersc.gov" ] - password = "" - secret = "" - -For details on how to modify your Covalent configuration, refer to the documentation `here `_. - -Then, reinstall this plugin using ``pip install covalent-slurm-plugin[sshproxy]`` in order to pull in the ``oathtool`` package which will generate one-time passwords. - -The ``hosts`` parameter is a list of hostnames for which the ``sshproxy`` service will be used. If the address provided in the plugin constructor is not present in this list, ``sshproxy`` will not be used. The ``password`` is the user's password, not including the 6-digit OTP. The ``secret`` is the 2FA secret provided when a user registers a new device on `Iris `_.
Rather than scan the QR code into an authenticator app, inspect the Oath Seed URL for a string labeled ``secret=...``, typically consisting of numbers and capital letters. Users can validate that correct OTP codes are being generated by using the command ``oathtool `` and using the 6-digit number returned in the "Test" option on the Iris 2FA page. Note that these values are stored in plaintext in the Covalent configuration file. If a user suspects credentials have been stolen or compromised, contact your systems administrator immediately to report the incident and request deactivation. - -.. autoclass:: covalent_slurm_plugin.SlurmExecutor - :members: - :inherited-members: +Each electron that uses the Slurm executor opens a separate SSH connection to the remote system. When executing 10 or more concurrent electrons, be mindful of client and/or server-side limitations on the total number of SSH connections. diff --git a/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt b/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt index d036f6ae7..1162956d1 100644 --- a/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt +++ b/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt @@ -7,5 +7,5 @@ Pillow==9.5.0 sentencepiece==0.1.99 streamlit==1.25.0 torch==2.0.1 -transformers==4.31.0 +transformers==4.36.0 xformers==0.0.21 diff --git a/doc/source/version_migrations/index.rst b/doc/source/version_migrations/index.rst index 48995ae73..a93a38901 100644 --- a/doc/source/version_migrations/index.rst +++ b/doc/source/version_migrations/index.rst @@ -52,20 +52,4 @@ If you are using Covalent v0.110.2 or later you can upgrade to Covalent v0.177.0 $ covalent start Covalent server has started at http://localhost:48008 - -6. Use the data migration tool to migrate any workflows you want to port to the new version of Covalent. - - For example, for a workflow with dispatch ID :code:`e0ba03a2-fdc0-474e-9997-7fa8e82932c5`: - - .. code:: bash - - $ covalent migrate-legacy-result-object ./results/e0ba03a2-fdc0-474e-9997-7fa8e82932c5/result.pkl - Processing result object for dispatch e0ba03a2-fdc0-474e-9997-7fa8e82932c5 - Processing node 0 - Processing node 1 - Processing node 2 - Processing node 3 - Processed transport graph - Processed lattice - -7. Navigate to the UI (http://localhost:48008) to view your workflows. +6. Navigate to the UI (http://localhost:48008) to view your workflows. 
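To make the new Slurm note concrete: every electron routed through the Slurm executor opens its own SSH connection when it runs, so a wide fan-out multiplies concurrent connections. A hypothetical sketch using the public Covalent API (the SlurmExecutor constructor arguments are placeholder values, not a recommended configuration):

    import covalent as ct
    from covalent_slurm_plugin import SlurmExecutor  # assumes the plugin is installed

    executor = SlurmExecutor(address="cluster.example.edu", username="user")  # placeholders

    @ct.electron(executor=executor)
    def task(i):
        return i * i

    @ct.lattice
    def fan_out(n):
        # Each of these n electrons opens a separate SSH connection to the cluster;
        # at n >= 10, client- or server-side SSH session limits may be hit.
        return [task(i) for i in range(n)]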
diff --git a/requirements-client.txt b/requirements-client.txt index ede6a20e3..1d74ffb04 100644 --- a/requirements-client.txt +++ b/requirements-client.txt @@ -7,6 +7,5 @@ furl>=2.1.3 networkx>=2.8.6 pydantic>=2.1.1 requests>=2.24.0 -simplejson>=3.17.6 toml>=0.10.2 watchdog>=2.0.3 diff --git a/requirements-qelectron.txt b/requirements-qelectron.txt new file mode 100644 index 000000000..98b537b50 --- /dev/null +++ b/requirements-qelectron.txt @@ -0,0 +1,4 @@ +lmdbm>=0.0.5 +mpire>=2.7.1 +orjson>=3.8.10 +pennylane>=0.31.1,<0.33.0 diff --git a/requirements.txt b/requirements.txt index 886884819..15ecc9091 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,23 +8,17 @@ dask[distributed]>=2022.6.0 fastapi>=0.100.0 filelock>=3.12.2 furl>=2.1.3 -lmdbm>=0.0.5 -mpire>=2.7.1 natsort>=8.4.0 networkx>=2.8.6 -orjson>=3.8.10 -pennylane>=0.31.1,<0.33.0 psutil>=5.9.0 pydantic>=2.1.1 python-multipart>=0.0.6 python-socketio>=5.7.1 requests>=2.24.0 rich>=12.0.0,<=13.3.5 -simplejson>=3.17.6 sqlalchemy>=1.4.37,<2.0.0 sqlalchemy_utils>=0.38.3 toml>=0.10.2 typing-extensions>=4.8.0 -uvicorn[standard]==0.18.3 +uvicorn[standard] watchdog>=2.2.1 -werkzeug>=2.0.3 diff --git a/setup.py b/setup.py index 50e3d6b63..6762fdf6d 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,9 @@ with open("VERSION") as f: version = f.read().strip() +# Allow installing a particular commit for testing +commit_sha = os.getenv("COVALENT_COMMIT_SHA") +artifact_id = commit_sha if commit_sha else f"v{version}" requirements_file = "requirements.txt" exclude_modules = [ @@ -55,6 +58,11 @@ with open(requirements_file) as f: required = f.read().splitlines() +# By default we don't install qelectron requirements +# and only install them as an extra +with open("requirements-qelectron.txt") as f: + qelectron_reqs = f.read().splitlines() + def recursively_append_files(directory: str): """ @@ -197,7 +205,7 @@ def find_sources(self): "version": version, "maintainer": "Agnostiq", "url": "https://github.com/AgnostiqHQ/covalent", - "download_url": f"https://github.com/AgnostiqHQ/covalent/archive/v{version}.tar.gz", + "download_url": f"https://github.com/AgnostiqHQ/covalent/archive/{artifact_id}.tar.gz", "license": "Apache License 2.0", "author": "Agnostiq", "author_email": "support@agnostiq.ai", @@ -220,6 +228,7 @@ def find_sources(self): "qiskit-ibm-provider==0.6.1", "qiskit-ibm-runtime==0.10.0", ], + "quantum": qelectron_reqs, }, "classifiers": [ "Development Status :: 4 - Beta", diff --git a/tests/covalent_dispatcher_tests/_cli/cli_test.py b/tests/covalent_dispatcher_tests/_cli/cli_test.py index ec02aa3f0..aac119712 100644 --- a/tests/covalent_dispatcher_tests/_cli/cli_test.py +++ b/tests/covalent_dispatcher_tests/_cli/cli_test.py @@ -61,7 +61,6 @@ def test_cli_commands(): "db", "deploy", "logs", - "migrate-legacy-result-object", "purge", "restart", "start", @@ -184,22 +183,14 @@ def test_deploy_up(mocker): "covalent_dispatcher._cli.groups.deploy_group._run_command_and_show_output", ) - # Fail with invalid command options. - mocker.patch( - "covalent_dispatcher._cli.groups.deploy_group.validate_args", - return_value="Non-empty msg", - ) + # Fail with invalid executor name with pytest.raises(SystemExit) as exc_info: ctx = click.Context(up) - ctx.invoke(up) + ctx.invoke(up, executor_name="invalid") assert exc_info.value.code == 1 # Succeed but exit after help message. 
- mocker.patch( - "covalent_dispatcher._cli.groups.deploy_group.validate_args", - return_value=None, - ) mocker.patch( "covalent_dispatcher._cli.groups.deploy_group.get_crm_object", ) @@ -227,6 +218,15 @@ def test_deploy_down(mocker): mock_run_command_and_show_output = mocker.patch( "covalent_dispatcher._cli.groups.deploy_group._run_command_and_show_output", ) + + # Fail with invalid executor name + with pytest.raises(SystemExit) as exc_info: + ctx = click.Context(down) + ctx.invoke(down, executor_name="invalid") + + assert exc_info.value.code == 1 + + # Succeed with valid command options. mocker.patch( "covalent_dispatcher._cli.groups.deploy_group.get_crm_object", ) diff --git a/tests/covalent_dispatcher_tests/_cli/migrate_test.py b/tests/covalent_dispatcher_tests/_cli/migrate_test.py deleted file mode 100644 index 18289bcca..000000000 --- a/tests/covalent_dispatcher_tests/_cli/migrate_test.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Testing results_dir migration script""" - -import pickle -from pathlib import Path - -from covalent._results_manager import Result -from covalent._shared_files.defaults import attr_prefix, generator_prefix, subscript_prefix -from covalent._workflow.transport import TransportableObject, _TransportGraph -from covalent_dispatcher._cli.migrate import ( - migrate_pickled_result_object, - process_lattice, - process_node, - process_result_object, - process_transport_graph, - to_decoded_electron_collection, -) - -dispatch_id = "652dc473-fa37-4846-85f3-b314204fd432" -sub_dispatch_id = "c333d0b3-8711-4595-9374-421f5482a592" - -basedir = Path(__file__).parent -sample_results_dir = basedir / Path("sample_results_dir") -result_pkl = sample_results_dir / dispatch_id / "result.pkl" - -# task node 0, parameter node 1 -# attribute node 2 -# sublattice node 3 -# task node 4, generator nodes 5, 6 -# subscript node 7 - - -def get_sample_result_object(): - with open(result_pkl, "rb") as f: - result_object = pickle.load(f) - return result_object - - -def compare_nodes_and_edges(tg_orig: _TransportGraph, tg_new: _TransportGraph): - """Convenience function for comparing a legacy transport graph with a processed one.""" - - # Check metadata - for n in tg_new._graph.nodes: - metadata = tg_new._graph.nodes[n]["metadata"] - assert "deps" in metadata - assert "call_before" in metadata - assert "call_after" in metadata - - # Check other node attributes - task_node = tg_new._graph.nodes[0] - orig_output = tg_orig._graph.nodes[0]["output"] - - assert isinstance(task_node["output"], TransportableObject) - assert task_node["output"].get_deserialized().__dict__ == orig_output.__dict__ - - collection_node = tg_new._graph.nodes[1] - assert ( - collection_node["function"].get_serialized() - == TransportableObject(to_decoded_electron_collection).get_serialized() - ) - - param_node = tg_new._graph.nodes[2] - orig_output = tg_orig._graph.nodes[2]["output"] - 
orig_value = tg_orig._graph.nodes[2]["value"] - - assert isinstance(param_node["output"], TransportableObject) - assert isinstance(param_node["value"], TransportableObject) - assert param_node["output"].get_deserialized() == orig_output - - param_node = tg_new._graph.nodes[3] - orig_output = tg_orig._graph.nodes[3]["output"] - orig_value = tg_orig._graph.nodes[3]["value"] - - assert isinstance(param_node["output"], TransportableObject) - assert isinstance(param_node["value"], TransportableObject) - assert param_node["output"].get_deserialized() == orig_output - - attr_node = tg_new._graph.nodes[4] - orig_output = tg_orig._graph.nodes[4]["output"] - - assert isinstance(attr_node["output"], TransportableObject) - assert attr_node["output"].get_deserialized() == orig_output - assert "attribute_name" not in attr_node - assert attr_prefix not in attr_node["name"] - - subl_node = tg_new._graph.nodes[5] - orig_output = tg_orig._graph.nodes[5]["output"] - - assert isinstance(subl_node["output"], TransportableObject) - assert isinstance(subl_node["sublattice_result"], Result) - assert subl_node["output"].get_deserialized() == orig_output - - task_node = tg_new._graph.nodes[6] - orig_output = tg_orig._graph.nodes[6]["output"] - - assert isinstance(task_node["output"], TransportableObject) - assert task_node["output"].get_deserialized() == orig_output - - gen_node = tg_new._graph.nodes[7] - orig_output = tg_orig._graph.nodes[7]["output"] - - assert isinstance(gen_node["output"], TransportableObject) - assert gen_node["output"].get_deserialized() == orig_output - assert "key" not in gen_node - assert generator_prefix not in gen_node["name"] - - gen_node = tg_new._graph.nodes[8] - orig_output = tg_orig._graph.nodes[8]["output"] - - assert isinstance(gen_node["output"], TransportableObject) - assert gen_node["output"].get_deserialized() == orig_output - assert "key" not in gen_node - assert generator_prefix not in gen_node["name"] - - subscript_node = tg_new._graph.nodes[9] - orig_output = tg_orig._graph.nodes[9]["output"] - - assert isinstance(subscript_node["output"], TransportableObject) - assert subscript_node["output"].get_deserialized() == orig_output - assert "key" not in subscript_node - assert subscript_prefix not in subscript_node["name"] - - assert tg_orig._graph.edges == tg_new._graph.edges - - -def test_process_legacy_node(): - """Test process_node""" - - ro = get_sample_result_object() - ro_orig = get_sample_result_object() - tg = ro.lattice.transport_graph - tg_orig = ro_orig.lattice.transport_graph - - task_node = tg._graph.nodes[0] - orig_output = tg_orig._graph.nodes[0]["output"] - process_node(task_node) - - param_node = tg._graph.nodes[2] - orig_output = tg_orig._graph.nodes[2]["output"] - orig_value = tg_orig._graph.nodes[2]["value"] - process_node(param_node) - - param_node = tg._graph.nodes[3] - orig_output = tg_orig._graph.nodes[3]["output"] - orig_value = tg_orig._graph.nodes[3]["value"] - process_node(param_node) - - attr_node = tg._graph.nodes[4] - orig_output = tg_orig._graph.nodes[4]["output"] - assert "attribute_name" in attr_node - assert attr_prefix in attr_node["name"] - process_node(attr_node) - - subl_node = tg._graph.nodes[5] - orig_output = tg_orig._graph.nodes[5]["output"] - assert "sublattice_result" in subl_node - process_node(subl_node) - - task_node = tg._graph.nodes[6] - orig_output = tg_orig._graph.nodes[6]["output"] - process_node(task_node) - - gen_node = tg._graph.nodes[7] - orig_output = tg_orig._graph.nodes[7]["output"] - assert "key" in gen_node - assert 
generator_prefix in gen_node["name"] - process_node(gen_node) - - gen_node = tg._graph.nodes[8] - orig_output = tg_orig._graph.nodes[8]["output"] - assert "key" in gen_node - assert generator_prefix in gen_node["name"] - process_node(gen_node) - - subscript_node = tg._graph.nodes[9] - orig_output = tg_orig._graph.nodes[9]["output"] - assert "key" in subscript_node - assert subscript_prefix in subscript_node["name"] - process_node(subscript_node) - - -def test_process_transport_graph(): - """Test process_transport_graph""" - - ro = get_sample_result_object() - - tg = ro.lattice.transport_graph - tg_new = process_transport_graph(tg) - compare_nodes_and_edges(tg, tg_new) - assert "dirty_nodes" in tg_new.__dict__ - - -def test_process_lattice(): - """Test process_lattice""" - - ro = get_sample_result_object() - ro_orig = get_sample_result_object() - lattice = process_lattice(ro._lattice) - lattice.named_args = lattice.named_args.get_deserialized() - lattice.named_kwargs = lattice.named_kwargs.get_deserialized() - - assert isinstance(lattice.workflow_function, TransportableObject) - assert list(lattice.named_args.keys()) == ["z"] - assert list(lattice.named_kwargs.keys()) == ["zz"] - assert lattice.metadata["executor_data"]["short_name"] == "local" - assert lattice.metadata["workflow_executor"] == "local" - assert lattice.metadata["workflow_executor_data"] == {} - assert lattice.metadata["deps"] == {} - assert lattice.metadata["call_before"] == [] - assert lattice.metadata["call_after"] == [] - - -def test_process_result_object(): - """Test process_result_object""" - - ro = get_sample_result_object() - old_inputs = ro._inputs - ro_new = process_result_object(ro) - inputs = ro_new.inputs.get_deserialized() - assert old_inputs["args"] == inputs["args"] - assert old_inputs["kwargs"] == inputs["kwargs"] - assert isinstance(ro_new._result, TransportableObject) - assert "dirty_nodes" in ro_new.lattice.transport_graph.__dict__ - - -def test_migrate_pickled_result_object(mocker): - """Test migrate_pickled_result_object""" - - mock_process_ro = mocker.patch("covalent_dispatcher._cli.migrate.process_result_object") - mock_persist = mocker.patch("covalent_dispatcher._db.update.persist") - - migrate_pickled_result_object(result_pkl) - mock_process_ro.assert_called_once() - mock_persist.assert_called_once() diff --git a/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl b/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl deleted file mode 100644 index 1fe2d20c7..000000000 Binary files a/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl and /dev/null differ diff --git a/tests/covalent_dispatcher_tests/_core/dispatcher_db_integration_test.py b/tests/covalent_dispatcher_tests/_core/dispatcher_db_integration_test.py index 53444a7b6..1544b753c 100644 --- a/tests/covalent_dispatcher_tests/_core/dispatcher_db_integration_test.py +++ b/tests/covalent_dispatcher_tests/_core/dispatcher_db_integration_test.py @@ -104,7 +104,7 @@ def list_workflow(arg): @ct.lattice def dict_workflow(arg): - return dict_task(arg) + return dict_task(arg=arg) # 1 2 # \ \ @@ -159,7 +159,7 @@ async def mock_get_incoming_edges(dispatch_id, node_id): # dict-type inputs - # Nodes 0=task, 1=:electron_dict:, 2=1, 3=2 + # Nodes 0=task, 1=:electron_dict:, 2=["a" (3), "b" (4)], 5=[1 (6), 2 (7)] dict_workflow.build_graph({"a": 1, "b": 2}) abstract_args = {"a": 2, "b": 3} tg = 
dict_workflow.transport_graph @@ -172,10 +172,31 @@ async def mock_get_incoming_edges(dispatch_id, node_id): mock_get_incoming_edges, ) + task_inputs = await _get_abstract_task_inputs( + result_object.dispatch_id, 0, tg.get_node_value(0, "name") + ) + expected_inputs = {"args": [], "kwargs": {"arg": 1}} + + assert task_inputs == expected_inputs + task_inputs = await _get_abstract_task_inputs( result_object.dispatch_id, 1, tg.get_node_value(1, "name") ) - expected_inputs = {"args": [], "kwargs": abstract_args} + expected_inputs = {"args": [2, 5], "kwargs": {}} + + assert task_inputs == expected_inputs + + task_inputs = await _get_abstract_task_inputs( + result_object.dispatch_id, 2, tg.get_node_value(2, "name") + ) + expected_inputs = {"args": [3, 4], "kwargs": {}} + + assert task_inputs == expected_inputs + + task_inputs = await _get_abstract_task_inputs( + result_object.dispatch_id, 5, tg.get_node_value(5, "name") + ) + expected_inputs = {"args": [6, 7], "kwargs": {}} assert task_inputs == expected_inputs diff --git a/tests/covalent_dispatcher_tests/_core/execution_test.py b/tests/covalent_dispatcher_tests/_core/execution_test.py index 6d521691f..4e2c20ac9 100644 --- a/tests/covalent_dispatcher_tests/_core/execution_test.py +++ b/tests/covalent_dispatcher_tests/_core/execution_test.py @@ -116,7 +116,7 @@ def list_workflow(arg): @ct.lattice def dict_workflow(arg): - return dict_task(arg) + return dict_task(arg=arg) # 1 2 # \ \ @@ -167,20 +167,36 @@ def multivar_workflow(x, y): # dict-type inputs dict_workflow.build_graph({"a": 1, "b": 2}) - serialized_args = {"a": ct.TransportableObject(1), "b": ct.TransportableObject(2)} # Nodes 0=task, 1=:electron_dict:, 2=1, 3=2 + # Nodes 0=task, 1=:electron_dict:, 2=["a" (3), "b" (4)], 5=[1 (6), 2 (7)] + sdkres = Result(lattice=dict_workflow, dispatch_id="asdf_dict_workflow") result_object = get_mock_srvresult(sdkres, test_db) tg = result_object.lattice.transport_graph - tg.set_node_value(2, "output", ct.TransportableObject(1)) - tg.set_node_value(3, "output", ct.TransportableObject(2)) + + tg.set_node_value(1, "output", ct.TransportableObject("node_1_output")) + tg.set_node_value(3, "output", ct.TransportableObject("a")) + tg.set_node_value(4, "output", ct.TransportableObject("b")) + tg.set_node_value(6, "output", ct.TransportableObject(1)) + tg.set_node_value(7, "output", ct.TransportableObject(2)) mock_get_result = mocker.patch( "covalent_dispatcher._core.runner.datasvc.get_result_object", return_value=result_object ) - task_inputs = await _get_task_inputs(1, tg.get_node_value(1, "name"), result_object) - expected_inputs = {"args": [], "kwargs": serialized_args} + serialized_kwargs = {"arg": ct.TransportableObject("node_1_output")} + task_inputs = await _get_task_inputs(0, tg.get_node_value(0, "name"), result_object) + expected_inputs = {"args": [], "kwargs": serialized_kwargs} + + serialized_args = [ct.TransportableObject("a"), ct.TransportableObject("b")] + task_inputs = await _get_task_inputs(2, tg.get_node_value(2, "name"), result_object) + expected_inputs = {"args": serialized_args, "kwargs": {}} + + assert task_inputs == expected_inputs + + serialized_args = [ct.TransportableObject(1), ct.TransportableObject(2)] + task_inputs = await _get_task_inputs(5, tg.get_node_value(5, "name"), result_object) + expected_inputs = {"args": serialized_args, "kwargs": {}} assert task_inputs == expected_inputs diff --git a/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py b/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py 
index 440742cba..964e7cbc5 100644 --- a/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py +++ b/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py @@ -27,6 +27,7 @@ from covalent._shared_files.schemas.result import AssetSchema, ResultSchema from covalent._shared_files.util_classes import RESULT_STATUS from covalent_dispatcher._dal.importers.result import SERVER_URL, handle_redispatch, import_result +from covalent_dispatcher._dal.job import Job from covalent_dispatcher._dal.result import get_result_object from covalent_dispatcher._db.datastore import DataStore @@ -140,6 +141,7 @@ def test_import_previously_imported_result(mocker, test_db): prefix="covalent-" ) as srv_dir: sub_res = get_mock_result(sub_dispatch_id, sdk_dir) + sub_res.metadata.root_dispatch_id = dispatch_id import_result(sub_res, srv_dir, None) srv_res = get_result_object(dispatch_id, bare=True) parent_node = srv_res.lattice.transport_graph.get_node(0) @@ -152,6 +154,49 @@ def test_import_previously_imported_result(mocker, test_db): assert sub_srv_res._electron_id == parent_node._electron_id +def test_import_subdispatch_cancel_req(mocker, test_db): + """Test that Job.cancel_requested is propagated to sublattices""" + + dispatch_id = "test_propagate_cancel_requested" + sub_dispatch_id = "test_propagate_cancel_requested_sub" + + mocker.patch("covalent_dispatcher._dal.base.workflow_db", test_db) + + mock_filter_uris = mocker.patch( + "covalent_dispatcher._dal.importers.result._filter_remote_uris" + ) + + with tempfile.TemporaryDirectory(prefix="covalent-") as sdk_dir, tempfile.TemporaryDirectory( + prefix="covalent-" + ) as srv_dir: + res = get_mock_result(dispatch_id, sdk_dir) + import_result(res, srv_dir, None) + + with test_db.Session() as session: + Job.update_bulk( + session, values={"cancel_requested": True}, equality_filters={}, membership_filters={} + ) + session.commit() + + with tempfile.TemporaryDirectory(prefix="covalent-") as sdk_dir, tempfile.TemporaryDirectory( + prefix="covalent-" + ) as srv_dir: + sub_res = get_mock_result(sub_dispatch_id, sdk_dir) + sub_res.metadata.root_dispatch_id = dispatch_id + srv_res = get_result_object(dispatch_id, bare=True) + parent_node = srv_res.lattice.transport_graph.get_node(0) + import_result(sub_res, srv_dir, parent_node._electron_id) + + with tempfile.TemporaryDirectory(prefix="covalent-") as srv_dir: + import_result(sub_res, srv_dir, parent_node._electron_id) + + with test_db.Session() as session: + uncancelled = Job.get( + session, fields=[], equality_filters={"cancel_requested": False}, membership_filters={} + ) + assert len(uncancelled) == 0 + + @pytest.mark.parametrize( "parent_status,new_status", [ @@ -230,8 +275,8 @@ def test_import_result_with_custom_assets(mocker, test_db): prefix="covalent-" ) as srv_dir: manifest = get_mock_result(dispatch_id, sdk_dir) - manifest.lattice.custom_assets = {"custom_lattice_asset": AssetSchema(size=0)} - manifest.lattice.transport_graph.nodes[0].custom_assets = { + manifest.lattice.assets._custom = {"custom_lattice_asset": AssetSchema(size=0)} + manifest.lattice.transport_graph.nodes[0].assets._custom = { "custom_electron_asset": AssetSchema(size=0) } filtered_res = import_result(manifest, srv_dir, None) diff --git a/tests/covalent_dispatcher_tests/_dal/lattice_test.py b/tests/covalent_dispatcher_tests/_dal/lattice_test.py index 7a55ac23f..f0f2d9a1a 100644 --- a/tests/covalent_dispatcher_tests/_dal/lattice_test.py +++ b/tests/covalent_dispatcher_tests/_dal/lattice_test.py @@ -83,9 +83,6 @@ def 
test_lattice_attributes(test_db, mocker): workflow_function = lat.get_value("workflow_function").get_deserialized() assert workflow_function(42) == 42 - res.lattice.lattice_imports == lat.get_value("lattice_imports") - res.lattice.cova_imports == lat.get_value("cova_imports") - def test_lattice_restricted_attributes(test_db, mocker): res = get_mock_result() diff --git a/tests/covalent_dispatcher_tests/_db/update_test.py b/tests/covalent_dispatcher_tests/_db/update_test.py index 567c83bc9..6e7dfb4c9 100644 --- a/tests/covalent_dispatcher_tests/_db/update_test.py +++ b/tests/covalent_dispatcher_tests/_db/update_test.py @@ -154,19 +154,6 @@ def test_result_persist_workflow_1(test_db, result_1, mocker): assert executor_data["short_name"] == le.short_name() assert executor_data["attributes"] == le.__dict__ - saved_named_args = local_store.load_file( - storage_path=lattice_storage_path, filename=lattice_row.named_args_filename - ) - - saved_named_kwargs = local_store.load_file( - storage_path=lattice_storage_path, filename=lattice_row.named_kwargs_filename - ) - saved_named_args_raw = saved_named_args.get_deserialized() - saved_named_kwargs_raw = saved_named_kwargs.get_deserialized() - - assert saved_named_args_raw == {} - assert saved_named_kwargs_raw == {"a": 1, "b": 2} - # Check that the electron records are as expected assert len(electron_rows) == 6 for electron in electron_rows: diff --git a/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py b/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py index 26b114913..310367df7 100644 --- a/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py +++ b/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py @@ -59,8 +59,6 @@ WORKFLOW_EXECUTOR_DATA_FILENAME = "workflow_executor_data.pkl" ERROR_FILENAME = "error.txt" INPUTS_FILENAME = "inputs.pkl" -NAMED_ARGS_FILENAME = "named_args.pkl" -NAMED_KWARGS_FILENAME = "named_kwargs.pkl" RESULTS_FILENAME = "results.pkl" VALUE_FILENAME = "value.pkl" STDOUT_FILENAME = "stdout.log" @@ -68,8 +66,6 @@ ERROR_FILENAME = "error.log" TRANSPORT_GRAPH_FILENAME = "transport_graph.pkl" HOOKS_FILENAME = "hooks.pkl" -COVA_IMPORTS_FILENAME = "cova_imports.json" -LATTICE_IMPORTS_FILENAME = "lattice_imports.txt" RESULTS_DIR = "/tmp/results" @@ -126,12 +122,8 @@ def get_lattice_kwargs( workflow_executor_data=json.dumps({}), error_filename=ERROR_FILENAME, inputs_filename=INPUTS_FILENAME, - named_args_filename=NAMED_ARGS_FILENAME, - named_kwargs_filename=NAMED_KWARGS_FILENAME, results_filename=RESULTS_FILENAME, hooks_filename=HOOKS_FILENAME, - cova_imports_filename=COVA_IMPORTS_FILENAME, - lattice_imports_filename=LATTICE_IMPORTS_FILENAME, results_dir=RESULTS_DIR, root_dispatch_id="dispatch_1", created_at=None, @@ -159,12 +151,8 @@ def get_lattice_kwargs( "workflow_executor_data": workflow_executor_data, "error_filename": error_filename, "inputs_filename": inputs_filename, - "named_args_filename": named_args_filename, - "named_kwargs_filename": named_kwargs_filename, "results_filename": results_filename, "hooks_filename": hooks_filename, - "cova_imports_filename": cova_imports_filename, - "lattice_imports_filename": lattice_imports_filename, "results_dir": results_dir, "root_dispatch_id": root_dispatch_id, "created_at": created_at, @@ -257,8 +245,8 @@ def test_insert_lattices_data(test_db, mocker): lattice_args = get_lattice_kwargs( dispatch_id=f"dispatch_{i + 1}", name=f"workflow_{i + 1}", - docstring_filename=f"docstring_{i+1}.txt", - storage_path=f"results/dispatch_{i+1}/", + 
docstring_filename=f"docstring_{i + 1}.txt", + storage_path=f"results/dispatch_{i + 1}/", executor="dask", workflow_executor="dask", created_at=cur_time, @@ -276,22 +264,18 @@ def test_insert_lattices_data(test_db, mocker): assert lattice.dispatch_id == f"dispatch_{i + 1}" assert lattice.electron_id is None assert lattice.name == f"workflow_{i + 1}" - assert lattice.docstring_filename == f"docstring_{i+1}.txt" + assert lattice.docstring_filename == f"docstring_{i + 1}.txt" assert lattice.status == "RUNNING" assert lattice.storage_type == STORAGE_TYPE - assert lattice.storage_path == f"results/dispatch_{i+1}/" + assert lattice.storage_path == f"results/dispatch_{i + 1}/" assert lattice.function_filename == FUNCTION_FILENAME assert lattice.function_string_filename == FUNCTION_STRING_FILENAME assert lattice.executor == "dask" assert lattice.workflow_executor == "dask" assert lattice.error_filename == ERROR_FILENAME assert lattice.inputs_filename == INPUTS_FILENAME - assert lattice.named_args_filename == NAMED_ARGS_FILENAME - assert lattice.named_kwargs_filename == NAMED_KWARGS_FILENAME assert lattice.results_filename == RESULTS_FILENAME assert lattice.hooks_filename == HOOKS_FILENAME - assert lattice.cova_imports_filename == COVA_IMPORTS_FILENAME - assert lattice.lattice_imports_filename == LATTICE_IMPORTS_FILENAME assert lattice.results_dir == RESULTS_DIR assert lattice.root_dispatch_id == f"dispatch_{i + 1}" assert ( diff --git a/tests/covalent_dispatcher_tests/_service/app_test.py b/tests/covalent_dispatcher_tests/_service/app_test.py index 7877fe673..4615e35c5 100644 --- a/tests/covalent_dispatcher_tests/_service/app_test.py +++ b/tests/covalent_dispatcher_tests/_service/app_test.py @@ -206,7 +206,7 @@ def test_register_sublattice(mocker, app, client, mock_manifest): ) mocker.patch("covalent_dispatcher._service.app.cancel_all_with_status") resp = client.post( - "/api/v2/dispatches/parent_dispatch/subdispatches", + "/api/v2/dispatches/parent_dispatch/sublattices", data=mock_manifest.json(), ) diff --git a/tests/covalent_tests/cloud_resource_manager/core_test.py b/tests/covalent_tests/cloud_resource_manager/core_test.py index 7fdca350d..b9b0f2ab0 100644 --- a/tests/covalent_tests/cloud_resource_manager/core_test.py +++ b/tests/covalent_tests/cloud_resource_manager/core_test.py @@ -41,6 +41,18 @@ def executor_module_path(): return "test_executor_module_path" +@pytest.fixture +def executor_infra_defaults(): + from pydantic import BaseModel + + class FakeExecutorInfraDefaults(BaseModel): + string_param: str = "fake_address_123" + number_param: int = 123 + sequence_param: tuple = (1, 2, 3) + + return FakeExecutorInfraDefaults + + @pytest.fixture def crm(mocker, executor_name, executor_module_path): mocker.patch( @@ -377,7 +389,9 @@ def test_get_tf_statefile_path(mocker, crm, executor_name): (False, {"test_key": "test_value"}), ], ) -def test_up(mocker, dry_run, executor_options, executor_name, executor_module_path): +def test_up( + mocker, dry_run, executor_options, executor_name, executor_module_path, executor_infra_defaults +): """ Unit test for CloudResourceManager.up() method """ @@ -401,10 +415,6 @@ def test_up(mocker, dry_run, executor_options, executor_name, executor_module_pa "covalent.cloud_resource_manager.core.get_executor_module", ) - mocker.patch( - "covalent.cloud_resource_manager.core.getattr", - ) - # Mocking as we are instantiating with executor options mocker.patch( "covalent.cloud_resource_manager.core.validate_options", @@ -438,6 +448,9 @@ def test_up(mocker, dry_run, 
executor_options, executor_name, executor_module_pa options=executor_options, ) + # Override infra defaults with dummy values. + crm.ExecutorInfraDefaults = executor_infra_defaults + with mock.patch( "covalent.cloud_resource_manager.core.open", mock.mock_open(), @@ -652,6 +665,27 @@ def test_crm_get_resource_status(mocker, crm): mock_terraform_error_validator.assert_called_once() +def test_crm_convert_to_tfvar(mocker, crm): + """ + Unit test for CloudResourceManager._convert_to_tfvar() method. + + Test conversion outcomes. + """ + _values_map = { + # Convenient test case (not valid Terraform): + (1, False, None, "covalent321"): '[1, false, null, "covalent321"]', + # Usual test cases: + True: "true", + False: "false", + None: "null", + "covalent123": '"covalent123"', # must include quotes + 16: "16", + } + + for _value, _expected in _values_map.items(): + assert crm._convert_to_tfvar(_value) == _expected + + def test_no_git(crm, mocker): """ Test for exit with status 1 if `git` is not available. diff --git a/tests/covalent_tests/serialize/lattice_serialization_test.py b/tests/covalent_tests/serialize/lattice_serialization_test.py index 4247b6230..709041bde 100644 --- a/tests/covalent_tests/serialize/lattice_serialization_test.py +++ b/tests/covalent_tests/serialize/lattice_serialization_test.py @@ -89,10 +89,10 @@ def workflow(x, y): with tempfile.TemporaryDirectory() as d: manifest = serialize_lattice(workflow, d) - assert ["custom_lat_asset"] == list(manifest.custom_assets.keys()) + assert ["custom_lat_asset"] == list(manifest.assets._custom.keys()) node_0 = manifest.transport_graph.nodes[0] - assert "custom_electron_asset" in node_0.custom_assets + assert "custom_electron_asset" in node_0.assets._custom node_1 = manifest.transport_graph.nodes[1] - assert not node_1.custom_assets + assert not node_1.assets._custom diff --git a/tests/covalent_tests/shared_files/qelectron_utils_test.py b/tests/covalent_tests/shared_files/qelectron_utils_test.py index 6c213e217..2cb66dbb6 100644 --- a/tests/covalent_tests/shared_files/qelectron_utils_test.py +++ b/tests/covalent_tests/shared_files/qelectron_utils_test.py @@ -30,7 +30,7 @@ def test_get_qelectron_db_path(mocker, db_exists): """Test the get_qelectron_db_path function.""" - mock_database = mocker.patch("covalent._shared_files.qelectron_utils.Database") + mock_database = mocker.patch("covalent.quantum.qserver.database.Database") mock_database.return_value.get_db_path.return_value.exists.return_value = db_exists dispatch_id = "mock_dispatch_id" diff --git a/tests/covalent_tests/triggers/database_trigger_test.py b/tests/covalent_tests/triggers/database_trigger_test.py index 734eb2e06..8927417be 100644 --- a/tests/covalent_tests/triggers/database_trigger_test.py +++ b/tests/covalent_tests/triggers/database_trigger_test.py @@ -61,8 +61,8 @@ def test_database_trigger_observe(mocker, where_clauses, database_trigger): database_trigger.where_clauses = where_clauses database_trigger.trigger = mocker.MagicMock() - mock_db_engine = mocker.patch("covalent.triggers.database_trigger.create_engine") - mock_session = mocker.patch("covalent.triggers.database_trigger.Session") + mock_db_engine = mocker.patch("sqlalchemy.create_engine") + mock_session = mocker.patch("sqlalchemy.orm.Session") mock_event = mocker.patch("covalent.triggers.database_trigger.Event") mock_sleep = mocker.patch("covalent.triggers.database_trigger.time.sleep") diff --git a/tests/covalent_tests/workflow/dispatch_source_test.py b/tests/covalent_tests/workflow/dispatch_source_test.py deleted 
file mode 100644 index 94d588b6b..000000000 --- a/tests/covalent_tests/workflow/dispatch_source_test.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests for writing the dispatch_source.py file""" - -import pytest - -from covalent._results_manager.result import _filter_cova_decorators - -COVA_IMPORTS = {"covalent", "lattice", "electron", "ct", "cova", "etron"} - - -INPUT1 = "\n".join( - [ - "@covalent.electron(", - ' executor="local"', - ")", - "def identity(x):", - " return x", - "", - "@covalent.electron", - "@covalent.lattice", - "@covalent.electron(", - ' executor="local"', - ")", - "def double(x):", - " return 2*x", - ] -) - -INPUT2 = INPUT1.replace("covalent", "ct") -INPUT3 = INPUT1.replace("covalent", "cova") -INPUT4 = INPUT1.replace("ct.electron", "electron") -INPUT5 = INPUT1.replace("ct.electron", "etron") -INPUT6 = INPUT1.replace("ct.lattice", "lattice") - -OUTPUT1 = "\n".join( - [ - "# @covalent.electron(", - '# executor="local"', - "# )", - "def identity(x):", - " return x", - "", - "# @covalent.electron", - "# @covalent.lattice", - "# @covalent.electron(", - '# executor="local"', - "# )", - "def double(x):", - " return 2*x", - ] -) - -OUTPUT2 = OUTPUT1.replace("covalent", "ct") -OUTPUT3 = OUTPUT1.replace("covalent", "cova") -OUTPUT4 = OUTPUT1.replace("ct.electron", "electron") -OUTPUT5 = OUTPUT1.replace("ct.electron", "etron") -OUTPUT6 = OUTPUT1.replace("ct.lattice", "lattice") - - -@pytest.mark.parametrize( - "input_str, expected_str", - [ - (INPUT1, OUTPUT1), - (INPUT2, OUTPUT2), - (INPUT3, OUTPUT3), - (INPUT4, OUTPUT4), - (INPUT5, OUTPUT5), - (INPUT6, OUTPUT6), - ], -) -def test_filter_cova_decorators( - input_str, - expected_str, -): - """Test the filtering out of Covalent-related decorators.""" - - output_str = _filter_cova_decorators(input_str, COVA_IMPORTS) - - assert output_str == expected_str diff --git a/tests/covalent_tests/workflow/electron_test.py b/tests/covalent_tests/workflow/electron_test.py index a3db76bb7..327673b6f 100644 --- a/tests/covalent_tests/workflow/electron_test.py +++ b/tests/covalent_tests/workflow/electron_test.py @@ -33,10 +33,9 @@ _build_sublattice_graph, filter_null_metadata, get_serialized_function_str, - to_decoded_electron_collection, ) from covalent._workflow.lattice import Lattice -from covalent._workflow.transport import TransportableObject, _TransportGraph, encode_metadata +from covalent._workflow.transport import TransportableObject, encode_metadata from covalent.executor.executor_plugins.local import LocalExecutor @@ -238,39 +237,6 @@ def test_wait_for_post_processing_when_returning_waiting_electron(): assert workflow_2.workflow_function.get_deserialized()() == 64 -def test_collection_node_helper_electron(): - """Unit test for `to_decoded_electron_collection`""" - - list_collection = [ - TransportableObject.make_transportable(1), - TransportableObject.make_transportable(2), - ] - - dict_collection 
= {"a": list_collection[0], "b": list_collection[1]} - assert to_decoded_electron_collection(x=list_collection) == [1, 2] - - assert to_decoded_electron_collection(x=dict_collection) == {"a": 1, "b": 2} - - -def test_electron_add_collection_node(): - """Test `to_decoded_electron_collection` in `Electron.add_collection_node`""" - - def f(x): - return x - - e = Electron(f) - tg = _TransportGraph() - node_id = e.add_collection_node_to_graph(tg, prefix=":") - collection_fn = tg.get_node_value(node_id, "function").get_deserialized() - - collection = [ - TransportableObject.make_transportable(1), - TransportableObject.make_transportable(2), - ] - - assert collection_fn(x=collection) == [1, 2] - - def test_injected_inputs_are_not_in_tg(): """Test that arguments to electrons injected by calldeps aren't added to the transport graph""" @@ -396,18 +362,30 @@ def workflow(x): g = workflow.transport_graph._graph # Account for postprocessing node - assert list(g.nodes) == [0, 1, 2, 3, 4] + assert list(g.nodes) == [0, 1, 2, 3, 4, 5, 6, 7, 8] fn = g.nodes[1]["function"].get_deserialized() - assert fn(x=2, y=5, z=7) == {"x": 2, "y": 5, "z": 7} - assert g.nodes[2]["value"].get_deserialized() == 5 - assert g.nodes[3]["value"].get_deserialized() == 7 + assert fn(["x", "y", "z"], [2, 5, 7]) == {"x": 2, "y": 5, "z": 7} + fn = g.nodes[2]["function"].get_deserialized() + assert fn("x", "y") == ["x", "y"] + keys = [g.nodes[3]["value"].get_deserialized(), g.nodes[4]["value"].get_deserialized()] + fn = g.nodes[5]["function"].get_deserialized() + assert fn(2, 3) == [2, 3] + vals = [g.nodes[6]["value"].get_deserialized(), g.nodes[7]["value"].get_deserialized()] + assert keys == ["x", "y"] + assert vals == [5, 7] assert set(g.edges) == { (1, 0, 0), (2, 1, 0), - (3, 1, 0), - (0, 4, 0), - (0, 4, 1), - (1, 4, 0), + (3, 2, 0), + (4, 2, 0), + (5, 1, 0), + (6, 5, 0), + (7, 5, 0), + (0, 8, 0), + (0, 8, 1), + (1, 8, 0), + (2, 8, 0), + (5, 8, 0), } diff --git a/tests/covalent_tests/workflow/lattice_serialization_test.py b/tests/covalent_tests/workflow/lattice_serialization_test.py index 72d962d0d..4be41091a 100644 --- a/tests/covalent_tests/workflow/lattice_serialization_test.py +++ b/tests/covalent_tests/workflow/lattice_serialization_test.py @@ -55,7 +55,6 @@ def workflow(x): return f(x) workflow.build_graph(5) - workflow.cova_imports = ["dummy_module"] json_workflow = workflow.serialize_to_json() diff --git a/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py b/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py index 25b696f35..50e34e23d 100644 --- a/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py +++ b/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py @@ -70,10 +70,9 @@ async def test_send_update(): assert response is None -@pytest.mark.skip(reason="Test is breaking, need to fix see PR #1728") def test_send_draw_request(): """Test draw request""" - workflow = get_mock_simple_workflow() - lattice = Lattice.deserialize_from_json(workflow.serialize_to_json()) + lattice = get_mock_simple_workflow() + lattice.build_graph(3) response = send_draw_request(lattice) assert response is None diff --git a/tests/functional_tests/workflow_stack_test.py b/tests/functional_tests/workflow_stack_test.py index 47e1578a7..f20b3a0f9 100644 --- a/tests/functional_tests/workflow_stack_test.py +++ b/tests/functional_tests/workflow_stack_test.py @@ -800,7 +800,8 @@ def workflow(x): res_1 = sum_values(x) return square(res_1) - dispatch_id = ct.dispatch(workflow)({"x": 1, "y": 2, "z": 3}) + # 
diff --git a/tests/qelectron_tests/core_tests/__init__.py b/tests/qelectron_tests/core_tests/__init__.py
new file mode 100644
index 000000000..6b2415372
--- /dev/null
+++ b/tests/qelectron_tests/core_tests/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2024 Agnostiq Inc.
+#
+# This file is part of Covalent.
+#
+# Licensed under the Apache License 2.0 (the "License"). A copy of the
+# License may be obtained with this software package or at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Use of this file is prohibited except in compliance with the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/qelectron_tests/test_decorator.py b/tests/qelectron_tests/core_tests/test_decorator.py
similarity index 98%
rename from tests/qelectron_tests/test_decorator.py
rename to tests/qelectron_tests/core_tests/test_decorator.py
index f376d0099..3bd11da15 100644
--- a/tests/qelectron_tests/test_decorator.py
+++ b/tests/qelectron_tests/core_tests/test_decorator.py
@@ -25,7 +25,7 @@
 import covalent as ct
 
 EXECUTORS = [
-    ct.executor.QiskitExecutor(device="local_sampler", shots=10_000),
+    ct.executor.Simulator(),
 ]
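(The executor swap above replaces the Qiskit local sampler with Covalent's built-in `Simulator`, so the core QElectron tests no longer depend on the Qiskit plugin. A minimal sketch of the decorator pattern these tests exercise; the RX/expval circuit body is illustrative, not taken from this diff:

```python
import pennylane as qml

import covalent as ct

qexecutor = ct.executor.Simulator()

@ct.qelectron(executors=qexecutor)
@qml.qnode(qml.device("default.qubit", wires=1))
def circuit(x):
    # Illustrative single-qubit circuit; any QNode body works here.
    qml.RX(x, wires=0)
    return qml.expval(qml.PauliZ(0))
```

The same substitution recurs in the renamed test files below.)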
diff --git a/tests/qelectron_tests/test_qelectron_db.py b/tests/qelectron_tests/core_tests/test_qelectron_db.py
similarity index 96%
rename from tests/qelectron_tests/test_qelectron_db.py
rename to tests/qelectron_tests/core_tests/test_qelectron_db.py
index 4d75f296e..7bc771003 100644
--- a/tests/qelectron_tests/test_qelectron_db.py
+++ b/tests/qelectron_tests/core_tests/test_qelectron_db.py
@@ -25,7 +25,7 @@ def test_db_exposed_in_result():
     """
     # Define a QElectron circuit.
-    qexecutor = ct.executor.QiskitExecutor(device="local_sampler")  # pylint: disable=no-member
+    qexecutor = ct.executor.Simulator()  # pylint: disable=no-member
 
     @ct.qelectron(executors=qexecutor)
     @qml.qnode(qml.device("default.qubit", wires=1))
diff --git a/tests/qelectron_tests/test_run_later.py b/tests/qelectron_tests/core_tests/test_run_later.py
similarity index 97%
rename from tests/qelectron_tests/test_run_later.py
rename to tests/qelectron_tests/core_tests/test_run_later.py
index 169e96662..aec023182 100644
--- a/tests/qelectron_tests/test_run_later.py
+++ b/tests/qelectron_tests/core_tests/test_run_later.py
@@ -23,7 +23,6 @@
 import covalent as ct
 
 EXECUTORS = [
-    ct.executor.QiskitExecutor(device="local_sampler", shots=10_000),
     ct.executor.Simulator(),
 ]
diff --git a/tests/qelectron_tests/pennylane_tests/conftest.py b/tests/qelectron_tests/pennylane_tests/conftest.py
index 30b774570..773ba2edf 100644
--- a/tests/qelectron_tests/pennylane_tests/conftest.py
+++ b/tests/qelectron_tests/pennylane_tests/conftest.py
@@ -33,7 +33,7 @@
 import pytest
 
 import covalent as ct
-from covalent._shared_files.utils import get_original_shots
+from covalent._shared_files.qelectron_utils import get_original_shots
 from covalent.quantum.qcluster.simulator import SIMULATOR_DEVICES
 
 SKIP_RETURN_TYPES = ["qml.apply", "qml.vn_entropy", "qml.mutual_info"]
diff --git a/tests/qelectron_tests/plugin_tests/__init__.py b/tests/qelectron_tests/plugin_tests/__init__.py
new file mode 100644
index 000000000..6b2415372
--- /dev/null
+++ b/tests/qelectron_tests/plugin_tests/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2024 Agnostiq Inc.
+#
+# This file is part of Covalent.
+#
+# Licensed under the Apache License 2.0 (the "License"). A copy of the
+# License may be obtained with this software package or at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Use of this file is prohibited except in compliance with the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/qelectron_tests/test_braket_plugin.py b/tests/qelectron_tests/plugin_tests/test_braket_plugin.py
similarity index 100%
rename from tests/qelectron_tests/test_braket_plugin.py
rename to tests/qelectron_tests/plugin_tests/test_braket_plugin.py
diff --git a/tests/qelectron_tests/test_qiskit_plugin.py b/tests/qelectron_tests/plugin_tests/test_qiskit_plugin.py
similarity index 98%
rename from tests/qelectron_tests/test_qiskit_plugin.py
rename to tests/qelectron_tests/plugin_tests/test_qiskit_plugin.py
index 438532424..b6046005f 100644
--- a/tests/qelectron_tests/test_qiskit_plugin.py
+++ b/tests/qelectron_tests/plugin_tests/test_qiskit_plugin.py
@@ -23,7 +23,7 @@
 import covalent as ct
 from covalent._shared_files.config import get_config
 
-from .utils import arg_vector, simple_circuit, weight_vector
+from ..utils import arg_vector, simple_circuit, weight_vector
 
 EXECUTOR_CLASSES = [
     ct.executor.QiskitExecutor,
diff --git a/tests/qelectron_tests/test_qiskit_plugin_runtime.py b/tests/qelectron_tests/plugin_tests/test_qiskit_plugin_runtime.py
similarity index 98%
rename from tests/qelectron_tests/test_qiskit_plugin_runtime.py
rename to tests/qelectron_tests/plugin_tests/test_qiskit_plugin_runtime.py
index 913d54388..e21724d23 100644
--- a/tests/qelectron_tests/test_qiskit_plugin_runtime.py
+++ b/tests/qelectron_tests/plugin_tests/test_qiskit_plugin_runtime.py
@@ -25,7 +25,7 @@
 import covalent as ct
 from covalent._shared_files.config import get_config
 
-from .utils import arg_vector, cyclic_selector, get_hamiltonian_circuit, weight_vector
+from ..utils import arg_vector, cyclic_selector, get_hamiltonian_circuit, weight_vector
 
 EXECUTOR_CLASSES = [
     ct.executor.QiskitExecutor,
diff --git a/tests/requirements.txt b/tests/requirements.txt
index 99f936c73..38c17dc3c 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -5,6 +5,7 @@ isort>=5.10.1
 locust>=2.11.0
 mock>=4.0.3
 nbconvert>=6.5.1
+pennylane>=0.31.1
 pre-commit>=2.20.0
 pytest>=7.1.3
 pytest-asyncio>=0.21.0
diff --git a/tests/stress_tests/scripts/mnist_sublattices.py b/tests/stress_tests/scripts/mnist_sublattices.py
index 31ad46fc8..2b0e0aab0 100644
--- a/tests/stress_tests/scripts/mnist_sublattices.py
+++ b/tests/stress_tests/scripts/mnist_sublattices.py
@@ -146,7 +146,7 @@ def test(
             correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
 
 
 def train_model(
diff --git a/tests/stress_tests/scripts/sublattices_mixed.py b/tests/stress_tests/scripts/sublattices_mixed.py
index 4dc085f0f..d5984b55c 100644
--- a/tests/stress_tests/scripts/sublattices_mixed.py
+++ b/tests/stress_tests/scripts/sublattices_mixed.py
@@ -147,7 +147,7 @@ def test(
             correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
 
 
 def train_model(
diff --git a/tests/stress_tests/scripts/tasks.py b/tests/stress_tests/scripts/tasks.py
index 55d9ab8c9..181c4c33a 100644
--- a/tests/stress_tests/scripts/tasks.py
+++ b/tests/stress_tests/scripts/tasks.py
@@ -175,7 +175,7 @@ def test(
             correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
 
 
 def train_model(
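(Endnote on the test-suite reorganization above: adding `__init__.py` files turns `core_tests/` and `plugin_tests/` into packages, which is why the shared-helper imports change from `.utils` to `..utils`. A sketch of the assumed resulting layout; only the `__init__.py` additions and file renames are taken from this diff, the rest is inferred from the import paths:

```
tests/qelectron_tests/
├── utils.py                    # shared helpers (arg_vector, ...), inferred
├── core_tests/
│   ├── __init__.py             # added in this diff
│   └── test_decorator.py       # moved from tests/qelectron_tests/
└── plugin_tests/
    ├── __init__.py             # added in this diff
    └── test_qiskit_plugin.py   # now: from ..utils import ...
```
)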