Skip to content

Commit

Permalink
chore: deleted old component code and fixed linter issues
Browse files Browse the repository at this point in the history
  • Loading branch information
jackdelahunt committed Nov 6, 2024
1 parent 7cf0bd6 commit aae608e
Show file tree
Hide file tree
Showing 9 changed files with 47 additions and 264 deletions.
176 changes: 0 additions & 176 deletions components/datasciencepipelines/datasciencepipelines.go

This file was deleted.

39 changes: 0 additions & 39 deletions components/datasciencepipelines/zz_generated.deepcopy.go

This file was deleted.

39 changes: 0 additions & 39 deletions components/modelregistry/zz_generated.deepcopy.go

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,16 +1,28 @@
package datasciencepipelines

import (
"context"
"fmt"

operatorv1 "github.com/openshift/api/operator/v1"
conditionsv1 "github.com/openshift/custom-resource-status/conditions/v1"
corev1 "k8s.io/api/core/v1"
apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
k8serr "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"

componentsv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1"
dscv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1"
"github.com/opendatahub-io/opendatahub-operator/v2/controllers/status"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/cluster"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/annotations"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/labels"
)

const (
ArgoWorkflowCRD = "workflows.argoproj.io"
)

func Init(platform cluster.Platform) error {
Expand Down Expand Up @@ -64,3 +76,26 @@ func GetComponentCR(dsc *dscv1.DataScienceCluster) *componentsv1.DataSciencePipe
},
}
}

// UnmanagedArgoWorkFlowExists checks whether an Argo Workflows CRD already
// exists on the cluster without having been deployed by this operator.
// It returns nil when the CRD is absent, or when it carries the ODH
// DataSciencePipelines component label with value "true" (i.e. it is owned
// by this operator). Any other existing CRD yields an error instructing the
// user how to proceed; lookup failures other than NotFound are wrapped.
func UnmanagedArgoWorkFlowExists(ctx context.Context, cli client.Client) error {
	workflowCRD := &apiextensionsv1.CustomResourceDefinition{}
	if err := cli.Get(ctx, client.ObjectKey{Name: ArgoWorkflowCRD}, workflowCRD); err != nil {
		if k8serr.IsNotFound(err) {
			// No Argo Workflows CRD at all: nothing conflicts with a managed install.
			return nil
		}
		// lint fix: no space before the colon in the wrapped error message.
		return fmt.Errorf("failed to get existing Workflow CRD: %w", err)
	}

	// The CRD exists; accept it only if it is labeled as deployed by ODH for
	// the DataSciencePipelines component.
	odhLabelValue, odhLabelExists := workflowCRD.Labels[labels.ODH.Component(componentsv1.DataSciencePipelinesComponentName)]
	if odhLabelExists && odhLabelValue == "true" {
		return nil
	}

	// lint fix: dropped the trailing space at the end of the message.
	return fmt.Errorf("%s CRD already exists but not deployed by this operator. "+
		"Remove existing Argo workflows or set `spec.components.datasciencepipelines.managementState` to Removed to proceed", ArgoWorkflowCRD)
}

// SetExistingArgoCondition records that an unmanaged Argo Workflows
// installation blocks DataSciencePipelines: it sets the DSPv2 Argo
// capability condition to false and marks the component reconcile as
// failed, both with the supplied reason/message.
func SetExistingArgoCondition(conditions *[]conditionsv1.Condition, reason, message string) {
	const notMet = corev1.ConditionFalse

	// Capability-level condition first, then the component-level one, so the
	// entries land in the conditions slice in the same order as before.
	status.SetCondition(conditions, string(status.CapabilityDSPv2Argo), reason, message, notMet)
	status.SetComponentCondition(
		conditions,
		componentsv1.DataSciencePipelinesComponentName,
		status.ReconcileFailed,
		message,
		notMet,
	)
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import (
"context"

securityv1 "github.com/openshift/api/security/v1"
monitoringv1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
rbacv1 "k8s.io/api/rbac/v1"
Expand All @@ -35,7 +36,6 @@ import (
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/controller/reconciler"
odhdeploy "github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy"
"github.com/opendatahub-io/opendatahub-operator/v2/pkg/metadata/labels"
monitoringv1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1"
)

var (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package datasciencepipelines
import (
"context"
"fmt"

componentsv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1"
odhtypes "github.com/opendatahub-io/opendatahub-operator/v2/pkg/controller/types"
odhdeploy "github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy"
Expand Down
12 changes: 6 additions & 6 deletions controllers/datasciencecluster/datasciencecluster_controller.go
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,6 @@ import (
componentsv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1"
dscv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1"
dsciv1 "github.com/opendatahub-io/opendatahub-operator/v2/apis/dscinitialization/v1"
"github.com/opendatahub-io/opendatahub-operator/v2/components/datasciencepipelines"
dashboardctrl "github.com/opendatahub-io/opendatahub-operator/v2/controllers/components/dashboard"
datasciencepipelinesctrl "github.com/opendatahub-io/opendatahub-operator/v2/controllers/components/datasciencepipelines"
modelregistryctrl "github.com/opendatahub-io/opendatahub-operator/v2/controllers/components/modelregistry"
Expand Down Expand Up @@ -209,14 +208,15 @@ func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.R

return ctrl.Result{}, nil
}

// Check preconditions if this is an upgrade
if instance.Status.Phase == status.PhaseReady {
// Check for existence of Argo Workflows if DSP is
if instance.Status.InstalledComponents[datasciencepipelines.ComponentName] {
if err := datasciencepipelines.UnmanagedArgoWorkFlowExists(ctx, r.Client); err != nil {
if instance.Status.InstalledComponents[componentsv1.DataSciencePipelinesComponentName] {
if err := datasciencepipelinesctrl.UnmanagedArgoWorkFlowExists(ctx, r.Client); err != nil {
message := fmt.Sprintf("Failed upgrade: %v ", err.Error())
_, err = status.UpdateWithRetry(ctx, r.Client, instance, func(saved *dscv1.DataScienceCluster) {
datasciencepipelines.SetExistingArgoCondition(&saved.Status.Conditions, status.ArgoWorkflowExist, message)
datasciencepipelinesctrl.SetExistingArgoCondition(&saved.Status.Conditions, status.ArgoWorkflowExist, message)
status.SetErrorCondition(&saved.Status.Conditions, status.ArgoWorkflowExist, message)
saved.Status.Phase = status.PhaseError
})
Expand Down Expand Up @@ -642,10 +642,10 @@ func (r *DataScienceClusterReconciler) getRequestName(ctx context.Context) (stri
// argoWorkflowCRDPredicates filters the delete events to trigger reconcile when Argo Workflow CRD is deleted.
var argoWorkflowCRDPredicates = predicate.Funcs{
DeleteFunc: func(e event.DeleteEvent) bool {
if e.Object.GetName() == datasciencepipelines.ArgoWorkflowCRD {
if e.Object.GetName() == datasciencepipelinesctrl.ArgoWorkflowCRD {
labelList := e.Object.GetLabels()
// CRD to be deleted with label "app.opendatahub.io/datasciencepipeline":"true", should not trigger reconcile
if value, exist := labelList[labels.ODH.Component(datasciencepipelines.ComponentName)]; exist && value == "true" {
if value, exist := labelList[labels.ODH.Component(componentsv1.DataSciencePipelinesComponentName)]; exist && value == "true" {
return false
}
}
Expand Down
3 changes: 1 addition & 2 deletions tests/e2e/dashboard_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -119,13 +119,12 @@ func (tc *DashboardTestCtx) testOwnerReferences() error {
}

// Test Dashboard CR ownerref
if tc.testDashboardInstance.OwnerReferences[0].Kind != "DataScienceCluster" {
if tc.testDashboardInstance.OwnerReferences[0].Kind != dscKind {
return fmt.Errorf("expected ownerreference DataScienceCluster not found. Got ownereferrence: %v",
tc.testDashboardInstance.OwnerReferences[0].Kind)
}

// Test Dashboard resources

appDeployments, err := tc.testCtx.kubeClient.AppsV1().Deployments(tc.testCtx.applicationsNamespace).List(tc.testCtx.ctx, metav1.ListOptions{
LabelSelector: labels.ODH.Component("dashboard"),
})
Expand Down
Loading

0 comments on commit aae608e

Please sign in to comment.