Skip to content

Commit

Permalink
refactor: moved Argo CR checking for DSP to an action
Browse files Browse the repository at this point in the history
  • Loading branch information
jackdelahunt committed Nov 11, 2024
1 parent 321ed4b commit 68f4aba
Show file tree
Hide file tree
Showing 4 changed files with 39 additions and 47 deletions.
Original file line number Diff line number Diff line change
@@ -1,24 +1,16 @@
package datasciencepipelines

import (
"context"
"fmt"

operatorv1 "github.com/openshift/api/operator/v1"
conditionsv1 "github.com/openshift/custom-resource-status/conditions/v1"
corev1 "k8s.io/api/core/v1"
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
k8serr "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"

componentsv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1"
dscv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1"
"github.com/opendatahub-io/opendatahub-operator/v2/controllers/status"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/cluster"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/annotations"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/labels"
)

const (
Expand Down Expand Up @@ -76,26 +68,3 @@ func GetComponentCR(dsc *dscv1.DataScienceCluster) *componentsv1.DataSciencePipe
},
}
}

// UnmanagedArgoWorkFlowExists returns an error when the Argo Workflow CRD is
// present on the cluster but was not deployed by this operator (detected via
// the ODH component label on the CRD). A missing CRD, or one carrying the ODH
// label with value "true", is not an error.
func UnmanagedArgoWorkFlowExists(ctx context.Context, cli client.Client) error {
	workflowCRD := &apiextensionsv1.CustomResourceDefinition{}
	if err := cli.Get(ctx, client.ObjectKey{Name: ArgoWorkflowCRD}, workflowCRD); err != nil {
		if k8serr.IsNotFound(err) {
			// No Argo Workflow CRD at all: nothing conflicts with DSP.
			return nil
		}
		// Fixed: removed the stray space before the colon in the message.
		return fmt.Errorf("failed to get existing Workflow CRD: %w", err)
	}

	// Verify the existing Workflow CRD is the one deployed by ODH, identified
	// by the DSP component label.
	odhLabelValue, odhLabelExists := workflowCRD.Labels[labels.ODH.Component(componentsv1.DataSciencePipelinesComponentName)]
	if odhLabelExists && odhLabelValue == "true" {
		return nil
	}

	// Fixed: dropped the trailing space after "to proceed".
	return fmt.Errorf("%s CRD already exists but not deployed by this operator. "+
		"Remove existing Argo workflows or set `spec.components.datasciencepipelines.managementState` to Removed to proceed", ArgoWorkflowCRD)
}

// SetExistingArgoCondition records that an unmanaged Argo Workflow CRD blocks
// DSP: it sets the DSPv2 Argo capability condition and the DSP component
// reconcile condition to False with the given reason and message. The
// capability condition is set first; the order determines the position of the
// entries in the conditions slice.
func SetExistingArgoCondition(conditions *[]conditionsv1.Condition, reason, message string) {
status.SetCondition(conditions, string(status.CapabilityDSPv2Argo), reason, message, corev1.ConditionFalse)
status.SetComponentCondition(conditions, componentsv1.DataSciencePipelinesComponentName, status.ReconcileFailed, message, corev1.ConditionFalse)
}
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ func NewComponentReconciler(ctx context.Context, mgr ctrl.Manager) error {
// Add datasciencepipelines-specific actions
WithAction(initialize).
WithAction(devFlags).
WithAction(UnmanagedArgoWorkFlowExists).
WithAction(render.NewAction(

Check failure on line 68 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / Run tests and collect coverage

undefined: render.NewAction

Check failure on line 68 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.NewAction

Check failure on line 68 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.NewAction
render.WithCache(true, render.DefaultCachingKeyFn),

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / Run tests and collect coverage

undefined: render.WithCache

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / Run tests and collect coverage

undefined: render.DefaultCachingKeyFn

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.WithCache

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.DefaultCachingKeyFn

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.WithCache

Check failure on line 69 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.DefaultCachingKeyFn
render.WithLabel(labels.ODH.Component(componentsv1.DataSciencePipelinesComponentName), "true"),

Check failure on line 70 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / Run tests and collect coverage

undefined: render.WithLabel

Check failure on line 70 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.WithLabel

Check failure on line 70 in controllers/components/datasciencepipelines/datasciencepipelines_controller.go

View workflow job for this annotation

GitHub Actions / golangci-lint

undefined: render.WithLabel
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,20 @@ package datasciencepipelines

import (
"context"
"errors"
"fmt"


corev1 "k8s.io/api/core/v1"
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
k8serr "k8s.io/apimachinery/pkg/api/errors"
"sigs.k8s.io/controller-runtime/pkg/client"

componentsv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1"
"github.com/opendatahub-io/opendatahub-operator/v2/controllers/status"
odhtypes "github.com/opendatahub-io/opendatahub-operator/v2/pkg/controller/types"
odhdeploy "github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/labels"
)

func initialize(ctx context.Context, rr *odhtypes.ReconciliationRequest) error {
Expand Down Expand Up @@ -64,3 +73,32 @@ func devFlags(ctx context.Context, rr *odhtypes.ReconciliationRequest) error {

return nil
}

// UnmanagedArgoWorkFlowExists is a reconcile action that blocks an upgrade
// when the Argo Workflow CRD exists on the cluster but was not deployed by
// this operator (detected via the ODH component label on the CRD). In that
// case it sets the DSPv2 Argo capability condition on the DSC to False and
// returns an error. A missing CRD, an ODH-labelled CRD, or an instance that
// is not in the Ready phase all pass without error.
func UnmanagedArgoWorkFlowExists(ctx context.Context, rr *odhtypes.ReconciliationRequest) error {
	// Only enforce this precondition on upgrade, i.e. when the instance was
	// previously reconciled to Ready.
	if rr.Instance.GetStatus().Phase != status.PhaseReady {
		return nil
	}

	workflowCRD := &apiextensionsv1.CustomResourceDefinition{}
	if err := rr.Client.Get(ctx, client.ObjectKey{Name: ArgoWorkflowCRD}, workflowCRD); err != nil {
		if k8serr.IsNotFound(err) {
			// No Argo Workflow CRD present: nothing conflicts with DSP.
			return nil
		}
		// Fixed: removed the stray space before the colon in the message.
		return fmt.Errorf("failed to get existing Workflow CRD: %w", err)
	}

	// Verify the existing Workflow CRD is the one deployed by ODH, identified
	// by the DSP component label; if not, surface the conflict through the
	// Argo capability condition and fail the reconciliation.
	odhLabelValue, odhLabelExists := workflowCRD.Labels[labels.ODH.Component(componentsv1.DataSciencePipelinesComponentName)]
	if !odhLabelExists || odhLabelValue != "true" {
		// Fixed: the two sentences were concatenated without a separator,
		// yielding "...by this operatorRemove existing..." in the message.
		message := fmt.Sprintf("Failed upgrade: %s CRD already exists but not deployed by this operator. "+
			"Remove existing Argo workflows or set `spec.components.datasciencepipelines.managementState` to Removed to proceed", ArgoWorkflowCRD)

		status.SetCondition(&rr.DSC.Status.Conditions, string(status.CapabilityDSPv2Argo), status.ArgoWorkflowExist, message, corev1.ConditionFalse)

		return errors.New(message)
	}

	return nil
}
16 changes: 0 additions & 16 deletions controllers/datasciencecluster/datasciencecluster_controller.go
Original file line number Diff line number Diff line change
Expand Up @@ -209,22 +209,6 @@ func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.R
return ctrl.Result{}, nil
}

// Check preconditions if this is an upgrade
if instance.Status.Phase == status.PhaseReady {
// Check for existence of Argo Workflows if DSP is
if instance.Status.InstalledComponents[componentsv1.DataSciencePipelinesComponentName] {
if err := datasciencepipelinesctrl.UnmanagedArgoWorkFlowExists(ctx, r.Client); err != nil {
message := fmt.Sprintf("Failed upgrade: %v ", err.Error())
_, err = status.UpdateWithRetry(ctx, r.Client, instance, func(saved *dscv1.DataScienceCluster) {
datasciencepipelinesctrl.SetExistingArgoCondition(&saved.Status.Conditions, status.ArgoWorkflowExist, message)
status.SetErrorCondition(&saved.Status.Conditions, status.ArgoWorkflowExist, message)
saved.Status.Phase = status.PhaseError
})
return ctrl.Result{}, err
}
}
}

// Start reconciling
if instance.Status.Conditions == nil {
reason := status.ReconcileInit
Expand Down

0 comments on commit 68f4aba

Please sign in to comment.