Update README.md (#5466) #2252

Workflow file for this run

name: Sanity Testing
on:
push:
branches:
- main
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: sanity_testing-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
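# (The group key is scoped to this workflow and ref, so a new push to the same
# branch cancels any in-flight sanity run instead of queueing a second one.)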
jobs:
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
environment: Testing
runs-on: ubuntu-latest
env:
# Need these in the local env so that corso can read them
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
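# SetM365App is expected to output the *names* of the client id/secret secrets
# to use; indexing the secrets context with those names resolves the actual
# values at runtime.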
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
# re-used values
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
RESTORE_DEST_PFX: Corso_Test_Sanity_
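# On a manual dispatch, use the user supplied as input; otherwise fall back to
# the repo-level CORSO_M365_TEST_USER_ID variable (`cond && a || b` is GitHub's
# ternary idiom).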
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
defaults:
run:
working-directory: src
##########################################################################################################################################
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: go build -o corso
timeout-minutes: 10
- run: go build -o sanity-test ./cmd/sanity_test
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
# Pre-Run cleanup
# Unlike CI tests, sanity tests are not expected to run concurrently.
# However, this workflow's concurrency group allows at most one run at a time,
# preferring the latest one. If we waited to clean up produced data until after
# the tests, a run could complete all of the testing but be cancelled before
# cleanup occurs. Cleaning up before the tests ensures we always begin with a
# clean slate and never compound leftover test data.
- name: Set purge boundary
if: always()
run: |
echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users
timeout-minutes: 30
uses: ./.github/actions/purge-m365-data
with:
user: ${{ env.TEST_USER }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
if: always()
uses: ./.github/actions/purge-m365-data
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
# Repository commands
- name: Version Test
timeout-minutes: 10
run: |
./corso --version | grep -c 'Corso version:'
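# grep exits non-zero when the version banner is missing, which fails the step.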
- name: Repo init test
timeout-minutes: 10
id: repo-init
run: |
set -euo pipefail
prefix=$(date +"%Y-%m-%d-%T")
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo init s3 \
--no-stats \
--hide-progress \
--prefix $prefix \
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if ! grep -q 'Initialized a S3 repository within bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
then
echo "Repo could not be initialized"
exit 1
fi
echo result="$prefix" >> $GITHUB_OUTPUT
- name: Repo connect test
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo connect s3 \
--no-stats \
--hide-progress \
--prefix ${{ steps.repo-init.outputs.result }} \
--bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
then
echo "Repo could not be connected"
exit 1
fi
# Run maintenance on an empty repo just to make sure the command still
# works.
- name: Repo maintenance test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo maintenance \
--no-stats \
--hide-progress \
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
- name: Exchange - Create new data
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . exchange emails \
--user ${{ env.TEST_USER }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }} \
--count 4
- name: Exchange - Backup
timeout-minutes: 30
id: exchange-backup
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
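# The incremental and non-delta steps below reuse the backup-id emitted here,
# presumably so the backup-restore-test action can verify them against this
# initial baseline.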
- name: Exchange - Incremental backup
timeout-minutes: 30
id: exchange-backup-incremental
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Non delta backup
timeout-minutes: 30
id: exchange-backup-non-delta
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup after non-delta
timeout-minutes: 30
id: exchange-backup-incremental-after-non-delta
uses: ./.github/actions/backup-restore-test
with:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# OneDrive
# generate new entries for test
- name: OneDrive - Create new data
id: new-data-creation-onedrive
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . onedrive files \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: OneDrive - Backup
id: onedrive-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for the incremental check
- name: OneDrive - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . onedrive files \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} \
--count 4
- name: OneDrive - Incremental backup
id: onedrive-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# SharePoint Library
# generate new entries for test
- name: SharePoint - Create new data
id: new-data-creation-sharepoint
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: SharePoint - Backup
id: sharepoint-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
# generate some more entries for the incremental check
- name: SharePoint - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} \
--count 4
- name: SharePoint - Incremental backup
id: sharepoint-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
# SharePoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma-separated list of SharePoint list names.
- name: SharePoint Lists - Create new data
id: new-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
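# Illustrative only: assuming the factory prints a header row followed by
# "<label> <list-name>" lines, the chain above emits something like
#   result=Corso_Test_Sanity_<suffix>_list1,Corso_Test_Sanity_<suffix>_list2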
# Extracts the common prefix from the SharePoint list names.
- name: SharePoint Lists - Store restore container
id: sharepoint-lists-store-restore-container
run: |
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
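# Illustrative only: for list names shaped like Corso_Test_Sanity_<date>_<time>_listN,
# keeping the first five '_'-separated fields of the first name recovers the
# shared Corso_Test_Sanity_<date>_<time> destination prefix.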
- name: SharePoint Lists - Backup
id: sharepoint-lists-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
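# on-collision: copy is assumed to map to corso's restore collision handling,
# keeping both the existing list and the restored copy instead of failing or
# overwriting when a name already exists.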
# generate some more entries for the incremental check
- name: SharePoint Lists - Create new data (for incremental)
id: inc-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Store restore container (for incremental)
id: sharepoint-lists-store-restore-container-inc
run: |
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Incremental backup
id: sharepoint-lists-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
##########################################################################################################################################
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
id: new-data-creation-groups
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: Groups - Backup
id: groups-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for incremental check
- name: Groups - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
--count 4
- name: Groups - Incremental backup
id: groups-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Logging & Notifications
# Upload the test logs as an artifact for later review.
- name: Upload test log
if: always()
uses: actions/upload-artifact@v4
with:
name: sanity-test-log
path: ${{ env.CORSO_LOG_DIR }}/*
if-no-files-found: error
retention-days: 14
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}