diff --git a/Makefile b/Makefile index c8bf0b214..50d4babb9 100644 --- a/Makefile +++ b/Makefile @@ -165,7 +165,7 @@ uninstall-server-package: @$(PYTHON_PIP) uninstall elyra -y install-server-package: uninstall-server-package - $(PYTHON_PIP) install --upgrade --upgrade-strategy $(UPGRADE_STRATEGY) "$(shell find dist -name "elyra-*-py3-none-any.whl")[kfp-tekton]" + $(PYTHON_PIP) install --upgrade --upgrade-strategy $(UPGRADE_STRATEGY) "$(shell find dist -name "elyra-*-py3-none-any.whl")" install-server: build-dependencies lint-server build-server install-server-package ## Build and install backend diff --git a/docs/source/getting_started/installation.md b/docs/source/getting_started/installation.md index 231eac603..8db705852 100644 --- a/docs/source/getting_started/installation.md +++ b/docs/source/getting_started/installation.md @@ -39,7 +39,6 @@ You can install elyra with all optional dependencies or with specific dependenci - `elyra[airflow]` - install the Elyra core features and support for [Apache Airflow pipelines](https://github.com/apache/airflow) - `elyra[airflow-gitlab]` - install the Elyra core features and GitLab support for [Apache Airflow pipelines](https://github.com/apache/airflow) - `elyra[kfp]` - install the Elyra core features and support for [Kubeflow Pipelines](https://github.com/kubeflow/pipelines) -- `elyra[kfp-tekton]` - install the Elyra core features and support for [Kubeflow Pipelines on Tekton](https://github.com/kubeflow/kfp-tekton) - `elyra[kfp-examples]` - install the Elyra core features and [Kubeflow Pipelines custom component examples](https://github.com/elyra-ai/examples/tree/main/component-catalog-connectors/kfp-example-components-connector) @@ -92,7 +91,7 @@ conda install -c conda-forge "elyra[all]" ``` **NOTE:** -The Elyra packaging process was changed in version 4.0. The [Apache Airflow pipelines](https://github.com/apache/airflow) or [Kubeflow Pipelines on Tekton](https://github.com/kubeflow/kfp-tekton) dependencies are no longer installed by default. To install this dependency, you must specify `elyra[all]`, `elyra[kfp]` or `elyra[kfp-tekton]`. +The Elyra packaging process was changed in version 4.0. The [Apache Airflow pipelines](https://github.com/apache/airflow) or [Kubeflow Pipelines](https://github.com/kubeflow/pipelines) dependencies are no longer installed by default. To install this dependency, you must specify `elyra[all]` or `elyra[kfp]`. You can also install the Pipeline editor, Code Snippet, Code Viewer, or Script editor extensions individually: diff --git a/docs/source/user_guide/pipelines.md b/docs/source/user_guide/pipelines.md index 242e9663c..bd991c1b3 100644 --- a/docs/source/user_guide/pipelines.md +++ b/docs/source/user_guide/pipelines.md @@ -36,7 +36,7 @@ Please review the [_Best practices for file-based pipeline nodes_ topic in the _ Elyra pipelines support three runtime platforms: - Local/JupyterLab -- [Kubeflow Pipelines](https://www.kubeflow.org/docs/components/pipelines/) (with Argo or [Tekton](https://github.com/kubeflow/kfp-tekton/) workflow engines) +- [Kubeflow Pipelines](https://www.kubeflow.org/docs/components/pipelines/) (with Argo workflow engine) - [Apache Airflow](https://airflow.apache.org/) #### Generic pipelines @@ -373,7 +373,7 @@ Use the [`elyra-pipeline`](command-line-interface.html#working-with-pipelines) ` ```bash $ elyra-pipeline submit elyra-pipelines/a-kubeflow.pipeline \ - --runtime-config kfp-shared-tekton + --runtime-config kfp ``` For Kubeflow Pipelines the `--monitor` option is supported. If specified, the pipeline execution status is monitored for up to `--monitor-timeout` minutes (default: 60) and the `elyra-pipeline submit` command terminates as follows: diff --git a/docs/source/user_guide/runtime-conf.md b/docs/source/user_guide/runtime-conf.md index 7d8a13bc6..eaa0e50c3 100644 --- a/docs/source/user_guide/runtime-conf.md +++ b/docs/source/user_guide/runtime-conf.md @@ -259,14 +259,11 @@ A password or token is required for most authentication types. Refer to the [Kub Example: `mypassword` ##### Kubeflow Pipelines engine (engine) -The engine being used by Kubeflow Pipelines to run pipelines: `Argo` or `Tekton`. If you have access to the Kubernetes cluster where Kubeflow Pipelines is deployed, run these commands in a terminal window to determine the engine type. +The engine being used by Kubeflow Pipelines to run pipelines: `Argo`. If you have access to the Kubernetes cluster where Kubeflow Pipelines is deployed, run these commands in a terminal window to determine the engine type. ``` # If this command completes successfully, the engine type is Argo. kubectl describe configmap -n kubeflow workflow-controller-configmap - -# If this command completes successfully, the engine type is Tekton. -kubectl describe configmap -n kubeflow kfp-tekton-config ``` The default is `Argo`. diff --git a/elyra/cli/pipeline_app.py b/elyra/cli/pipeline_app.py index 68ffa0365..6dd1b7dc6 100644 --- a/elyra/cli/pipeline_app.py +++ b/elyra/cli/pipeline_app.py @@ -428,8 +428,7 @@ def _monitor_kfp_submission(runtime_config: dict, runtime_config_name: str, run_ raise click.ClickException(f"Kubeflow authentication failed: {ae}") try: - # Create a Kubeflow Pipelines client. There is no need to use a Tekton client, - # because the monitoring API is agnostic. + # Create a Kubeflow Pipelines client. client = ArgoClient( host=runtime_config.metadata["api_endpoint"].rstrip("/"), cookies=auth_info.get("cookies", None), diff --git a/elyra/metadata/schemas/kfp.json b/elyra/metadata/schemas/kfp.json index 701549b44..f5c333d07 100644 --- a/elyra/metadata/schemas/kfp.json +++ b/elyra/metadata/schemas/kfp.json @@ -77,7 +77,7 @@ "title": "Kubeflow Pipelines engine", "description": "The Kubeflow Pipelines engine in use", "type": "string", - "enum": ["Argo", "Tekton"], + "enum": ["Argo"], "default": "Argo", "uihints": { "category": "Kubeflow Pipelines" diff --git a/elyra/metadata/schemasproviders.py b/elyra/metadata/schemasproviders.py index 852f90195..7f04f4de7 100644 --- a/elyra/metadata/schemasproviders.py +++ b/elyra/metadata/schemasproviders.py @@ -23,12 +23,6 @@ import entrypoints from traitlets import log # noqa H306 -try: - from kfp_tekton import TektonClient -except ImportError: - # We may not have kfp-tekton available and that's okay, for example when only using airflow! - TektonClient = None - from elyra.metadata.schema import SchemasProvider from elyra.metadata.schemaspaces import CodeSnippets from elyra.metadata.schemaspaces import ComponentCatalogs @@ -98,17 +92,6 @@ def get_schemas(self) -> List[Dict]: ) if kfp_schema_present: # Update the kfp engine enum to reflect current packages... - # If TektonClient package is missing, navigate to the engine property - # and remove 'tekton' entry if present and return updated result. - if not TektonClient: - # locate the schema and update the enum - for schema in runtime_schemas: - if schema["name"] == "kfp": - engine_enum: list = schema["properties"]["metadata"]["properties"]["engine"]["enum"] - if "Tekton" in engine_enum: - engine_enum.remove("Tekton") - schema["properties"]["metadata"]["properties"]["engine"]["enum"] = engine_enum - # For KFP schemas replace placeholders: # - properties.metadata.properties.auth_type.enum ({AUTH_PROVIDER_PLACEHOLDERS}) # - properties.metadata.properties.auth_type.default ({DEFAULT_AUTH_PROVIDER_PLACEHOLDER}) diff --git a/elyra/pipeline/kfp/PipelineConf.py b/elyra/pipeline/kfp/PipelineConf.py index ddf3fea38..d4fcb3106 100644 --- a/elyra/pipeline/kfp/PipelineConf.py +++ b/elyra/pipeline/kfp/PipelineConf.py @@ -1,3 +1,18 @@ +# +# Copyright 2018-2025 Elyra Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# from typing import Union from kubernetes.client.models import V1PodDNSConfig @@ -20,7 +35,6 @@ def __init__(self): def set_image_pull_secrets(self, image_pull_secrets): """Configures the pipeline level imagepullsecret. - Args: image_pull_secrets: a list of Kubernetes V1LocalObjectReference For detailed description, check Kubernetes V1LocalObjectReference definition @@ -31,7 +45,6 @@ def set_image_pull_secrets(self, image_pull_secrets): def set_timeout(self, seconds: int): """Configures the pipeline level timeout. - Args: seconds: number of seconds for timeout """ @@ -41,7 +54,6 @@ def set_timeout(self, seconds: int): def set_parallelism(self, max_num_pods: int): """Configures the max number of total parallel pods that can execute at the same time in a workflow. - Args: max_num_pods: max number of total parallel pods. """ @@ -53,7 +65,6 @@ def set_parallelism(self, max_num_pods: int): def set_ttl_seconds_after_finished(self, seconds: int): """Configures the ttl after the pipeline has finished. - Args: seconds: number of seconds for the workflow to be garbage collected after it is finished. @@ -64,7 +75,6 @@ def set_ttl_seconds_after_finished(self, seconds: int): def set_pod_disruption_budget(self, min_available: Union[int, str]): """PodDisruptionBudget holds the number of concurrent disruptions that you allow for pipeline Pods. - Args: min_available (Union[int, str]): An eviction is allowed if at least "minAvailable" pods selected by "selector" will still be available after @@ -77,12 +87,9 @@ def set_pod_disruption_budget(self, min_available: Union[int, str]): def set_default_pod_node_selector(self, label_name: str, value: str): """Add a constraint for nodeSelector for a pipeline. - Each constraint is a key-value pair label. - For the container to be eligible to run on a node, the node must have each of the constraints appeared as labels. - Args: label_name: The name of the constraint label. value: The value of the constraint label. @@ -92,7 +99,6 @@ def set_default_pod_node_selector(self, label_name: str, value: str): def set_image_pull_policy(self, policy: str): """Configures the default image pull policy. - Args: policy: the pull policy, has to be one of: Always, Never, IfNotPresent. For more info: @@ -104,7 +110,6 @@ def set_image_pull_policy(self, policy: str): def add_op_transformer(self, transformer): """Configures the op_transformers which will be applied to all ops in the pipeline. The ops can be ResourceOp, VolumeOp, or ContainerOp. - Args: transformer: A function that takes a kfp Op as input and returns a kfp Op """ @@ -112,15 +117,12 @@ def add_op_transformer(self, transformer): def set_dns_config(self, dns_config: V1PodDNSConfig): """Set the dnsConfig to be given to each pod. - Args: dns_config: Kubernetes V1PodDNSConfig For detailed description, check Kubernetes V1PodDNSConfig definition https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1PodDNSConfig.md - Example: :: - import kfp from kubernetes.client.models import V1PodDNSConfig, V1PodDNSConfigOption pipeline_conf = kfp.dsl.PipelineConf() @@ -139,10 +141,8 @@ def data_passing_method(self): def data_passing_method(self, value): """Sets the object representing the method used for intermediate data passing. - Example: :: - from kfp.dsl import PipelineConf, data_passing_methods from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource pipeline_conf = PipelineConf() diff --git a/elyra/pipeline/kfp/kfp_authentication.py b/elyra/pipeline/kfp/kfp_authentication.py index dbc890ad8..7f3d4690c 100644 --- a/elyra/pipeline/kfp/kfp_authentication.py +++ b/elyra/pipeline/kfp/kfp_authentication.py @@ -27,9 +27,9 @@ from typing import Tuple from urllib.parse import urlsplit -from kfp.auth import KF_PIPELINES_SA_TOKEN_ENV -from kfp.auth import KF_PIPELINES_SA_TOKEN_PATH -from kfp.auth import ServiceAccountTokenVolumeCredentials +from kfp.client import KF_PIPELINES_SA_TOKEN_ENV +from kfp.client import KF_PIPELINES_SA_TOKEN_PATH +from kfp.client import ServiceAccountTokenVolumeCredentials import requests diff --git a/elyra/pipeline/kfp/kfp_processor.py b/elyra/pipeline/kfp/kfp_processor.py index ac0514a66..2c69ca399 100644 --- a/elyra/pipeline/kfp/kfp_processor.py +++ b/elyra/pipeline/kfp/kfp_processor.py @@ -39,19 +39,11 @@ from kfp import Client as ArgoClient from kfp import compiler as kfp_argo_compiler from kfp import components as components -from kfp.dsl import PipelineConf -from kfp.dsl import RUN_ID_PLACEHOLDER from kubernetes import client as k8s_client from traitlets import default from traitlets import Unicode -try: - from kfp_tekton import compiler as kfp_tekton_compiler - from kfp_tekton import TektonClient -except ImportError: - # We may not have kfp-tekton available and that's okay! - kfp_tekton_compiler = None - TektonClient = None +RUN_ID_PLACEHOLDER = "{{workflow.uid}}" from elyra._version import __version__ from elyra.metadata.schemaspaces import RuntimeImages @@ -61,6 +53,7 @@ from elyra.pipeline.kfp.kfp_authentication import AuthenticationError from elyra.pipeline.kfp.kfp_authentication import KFPAuthenticator from elyra.pipeline.kfp.kfp_properties import KfpPipelineParameter +from elyra.pipeline.kfp.PipelineConf import PipelineConf from elyra.pipeline.pipeline import Operation from elyra.pipeline.pipeline import Pipeline from elyra.pipeline.processor import PipelineProcessor @@ -90,7 +83,6 @@ class WorkflowEngineType(Enum): """ ARGO = "argo" - TEKTON = "tekton" @staticmethod def get_instance_by_value(value: str) -> "WorkflowEngineType": @@ -173,11 +165,6 @@ def process(self, pipeline): api_password = runtime_configuration.metadata.get("api_password") user_namespace = runtime_configuration.metadata.get("user_namespace") workflow_engine = WorkflowEngineType.get_instance_by_value(runtime_configuration.metadata.get("engine", "argo")) - if workflow_engine == WorkflowEngineType.TEKTON and not TektonClient: - raise ValueError( - "Python package `kfp-tekton` is not installed. " - "Please install using `elyra[kfp-tekton]` to use Tekton engine." - ) # unpack Cloud Object Storage configs cos_endpoint = runtime_configuration.metadata["cos_endpoint"] @@ -206,23 +193,14 @@ def process(self, pipeline): # Create Kubeflow Client ############# try: - if workflow_engine == WorkflowEngineType.TEKTON: - client = TektonClient( - host=api_endpoint, - cookies=auth_info.get("cookies", None), - credentials=auth_info.get("credentials", None), - existing_token=auth_info.get("existing_token", None), - namespace=user_namespace, - ssl_ca_cert=auth_info.get("ssl_ca_cert", None), - ) - else: - client = ArgoClient( - host=api_endpoint, - cookies=auth_info.get("cookies", None), - credentials=auth_info.get("credentials", None), - existing_token=auth_info.get("existing_token", None), - namespace=user_namespace, - ) + client = ArgoClient( + host=api_endpoint, + cookies=auth_info.get("cookies", None), + credentials=auth_info.get("credentials", None), + existing_token=auth_info.get("existing_token", None), + namespace=user_namespace, + ssl_ca_cert=auth_info.get("ssl_ca_cert", None), + ) except Exception as ex: # a common cause of these errors is forgetting to include `/pipeline` or including it with an 's' api_endpoint_obj = urlsplit(api_endpoint) @@ -276,7 +254,7 @@ def process(self, pipeline): with tempfile.TemporaryDirectory() as temp_dir: self.log.debug(f"Created temporary directory at: {temp_dir}") - pipeline_path = os.path.join(temp_dir, f"{pipeline_name}.tar.gz") + pipeline_path = os.path.join(temp_dir, f"{pipeline_name}.yaml") ############# # Get Pipeline ID @@ -352,11 +330,15 @@ def process(self, pipeline): ) # extract the ID of the pipeline we created - pipeline_id = kfp_pipeline.id + pipeline_id = kfp_pipeline.pipeline_id # the initial "pipeline version" has the same id as the pipeline itself - version_id = pipeline_id - + version_details = client.list_pipeline_versions(pipeline_id=pipeline_id) + version_list = version_details.pipeline_versions + if isinstance(version_list, list): + version_id = version_list[0].pipeline_version_id + else: + version_id = None # CASE 2: pipeline already exists else: # upload the "pipeline version" @@ -367,7 +349,7 @@ def process(self, pipeline): ) # extract the id of the "pipeline version" that was created - version_id = kfp_pipeline.id + version_id = kfp_pipeline.pipeline_version_id except Exception as ex: # a common cause of these errors is forgetting to include `/pipeline` or including it with an 's' @@ -417,7 +399,10 @@ def process(self, pipeline): # create pipeline run (or specified pipeline version) run = client.run_pipeline( - experiment_id=experiment.id, job_name=job_name, pipeline_id=pipeline_id, version_id=version_id + experiment_id=experiment.experiment_id, + job_name=job_name, + pipeline_id=pipeline_id, + version_id=version_id, ) except Exception as ex: @@ -436,7 +421,7 @@ def process(self, pipeline): self.log_pipeline_info( pipeline_name, - f"pipeline submitted: {public_api_endpoint}/#/runs/details/{run.id}", + f"pipeline submitted: {public_api_endpoint}/#/runs/details/{run.run_id}", duration=time.time() - t0, ) @@ -451,8 +436,8 @@ def process(self, pipeline): object_storage_path = None return KfpPipelineProcessorResponse( - run_id=run.id, - run_url=f"{public_api_endpoint}/#/runs/details/{run.id}", + run_id=run.run_id, + run_url=f"{public_api_endpoint}/#/runs/details/{run.run_id}", object_storage_url=object_storage_url, object_storage_path=object_storage_path, ) @@ -495,8 +480,6 @@ def export( ) workflow_engine = WorkflowEngineType.get_instance_by_value(runtime_configuration.metadata.get("engine", "argo")) - if workflow_engine == WorkflowEngineType.TEKTON and not TektonClient: - raise ValueError("kfp-tekton not installed. Please install using elyra[kfp-tekton] to use Tekton engine.") if Path(absolute_pipeline_export_path).exists() and not overwrite: raise ValueError("File " + absolute_pipeline_export_path + " already exists.") @@ -566,7 +549,7 @@ def _generate_pipeline_dsl( code_generation_options = {} # Load Kubeflow Pipelines Python DSL template - loader = PackageLoader("elyra", "templates/kubeflow/v1") + loader = PackageLoader("elyra", "templates/kubeflow/v2") template_env = Environment(loader=loader) # Add filter that produces a Python-safe variable name template_env.filters["python_safe"] = lambda x: re.sub(r"[" + re.escape(string.punctuation) + "\\s]", "_", x) @@ -669,12 +652,7 @@ def _compile_pipeline_dsl( # in the generated Python DSL "generated_pipeline" pipeline_function = getattr(mod, "generated_pipeline") # compile the DSL - if workflow_engine == WorkflowEngineType.TEKTON: - kfp_tekton_compiler.TektonCompiler().compile( - pipeline_function, output_file, pipeline_conf=pipeline_conf - ) - else: - kfp_argo_compiler.Compiler().compile(pipeline_function, output_file, pipeline_conf=pipeline_conf) + kfp_argo_compiler.Compiler().compile(pipeline_function, output_file) except Exception as ex: raise RuntimeError( f"Failed to compile pipeline with workflow_engine '{workflow_engine.value}' to '{output_file}'" @@ -730,7 +708,7 @@ def _generate_workflow_tasks( pipeline.pipeline_properties.get(pipeline_constants.COS_OBJECT_PREFIX), pipeline_instance_id ) # - load the generic component definition template - template_env = Environment(loader=PackageLoader("elyra", "templates/kubeflow/v1")) + template_env = Environment(loader=PackageLoader("elyra", "templates/kubeflow/v2")) generic_component_template = template_env.get_template("generic_component_definition_template.jinja2") # Add filter that escapes the " character in strings template_env.filters["string_delimiter_safe"] = lambda string: re.sub('"', '\\\\\\\\"', string) @@ -900,12 +878,8 @@ def _generate_workflow_tasks( # Generate unique ELYRA_RUN_NAME value, which gets exposed as an environment # variable - if workflow_engine == WorkflowEngineType.TEKTON: - # Value is derived from an existing annotation; use dummy value - workflow_task["task_modifiers"]["set_run_name"] = "dummy value" - else: - # Use Kubeflow Pipelines provided RUN_ID_PLACEHOLDER as run name - workflow_task["task_modifiers"]["set_run_name"] = RUN_ID_PLACEHOLDER + # Use Kubeflow Pipelines provided RUN_ID_PLACEHOLDER as run name + workflow_task["task_modifiers"]["set_run_name"] = RUN_ID_PLACEHOLDER # Upload dependencies to cloud storage self._upload_dependencies_to_object_store( @@ -926,13 +900,13 @@ def _generate_workflow_tasks( # Identify task inputs and outputs using the component spec # If no data type was specified, string is assumed factory_function = components.load_component_from_text(component.definition) - for input in factory_function.component_spec.inputs or []: - sanitized_input_name = self._sanitize_param_name(input.name) + for input_key, input_value in (factory_function.component_spec.inputs or {}).items(): + sanitized_input_name = self._sanitize_param_name(input_key) workflow_task["task_inputs"][sanitized_input_name] = { "value": None, "task_output_reference": None, "pipeline_parameter_reference": None, - "data_type": (input.type or "string").lower(), + "data_type": (input_value.type or "string").lower(), } # Determine whether the value needs to be rendered in quotes # in the generated DSL code. For example "my name" (string), and 34 (integer). @@ -944,9 +918,9 @@ def _generate_workflow_tasks( "bool", ] - for output in factory_function.component_spec.outputs or []: - workflow_task["task_outputs"][self._sanitize_param_name(output.name)] = { - "data_type": output.type, + for output_key, output_value in (factory_function.component_spec.outputs or {}).items(): + workflow_task["task_outputs"][self._sanitize_param_name(output_key)] = { + "data_type": output_value.type, } # Iterate over component properties and assign values to diff --git a/elyra/tests/pipeline/kfp/conftest.py b/elyra/tests/pipeline/kfp/conftest.py index c6ccef079..901e0c73f 100644 --- a/elyra/tests/pipeline/kfp/conftest.py +++ b/elyra/tests/pipeline/kfp/conftest.py @@ -29,7 +29,6 @@ from elyra.metadata.schemaspaces import RuntimeImages from elyra.metadata.schemaspaces import Runtimes from elyra.metadata.storage import FileMetadataStore -from elyra.pipeline.kfp.kfp_processor import WorkflowEngineType from elyra.pipeline.parser import PipelineParser from elyra.pipeline.pipeline import Pipeline from elyra.pipeline.pipeline_constants import COS_OBJECT_PREFIX @@ -158,7 +157,7 @@ def metadata_dependencies(metadata_managers, request): - "pipeline_file": existing pipeline filename Optional inputs: - "with_cos_object_prefix": bool (default: False) - - "workflow_engine": WorkflowEngineType.ARGO or WorkflowEngineType.TEKTON + - "workflow_engine": WorkflowEngineType.ARGO - "use_cos_credentials_secret": bool (default: False) - "require_pull_secret": bool (default: False) The fixture yields a dictionary with the following keys: @@ -250,10 +249,7 @@ def create_runtime_config(rt_metadata_manager: MetadataManager, customization_op }, } - if customization_options.get("workflow_engine") == WorkflowEngineType.TEKTON: - kfp_runtime_config["metadata"]["engine"] = "Tekton" - else: - kfp_runtime_config["metadata"]["engine"] = "Argo" + kfp_runtime_config["metadata"]["engine"] = "Argo" if customization_options.get("use_cos_credentials_secret"): kfp_runtime_config["metadata"]["cos_auth_type"] = "KUBERNETES_SECRET" diff --git a/elyra/tests/pipeline/kfp/test_kfp_component_parser.py b/elyra/tests/pipeline/kfp/test_kfp_component_parser.py index c420f558d..f01abd0b4 100644 --- a/elyra/tests/pipeline/kfp/test_kfp_component_parser.py +++ b/elyra/tests/pipeline/kfp/test_kfp_component_parser.py @@ -308,7 +308,7 @@ def test_parse_kfp_component_url(): reader = UrlComponentCatalogConnector(kfp_supported_file_types) # Read contents of given path - url = "https://raw.githubusercontent.com/kubeflow/pipelines/1.4.1/components/notebooks/Run_notebook_using_papermill/component.yaml" # noqa: E501 + url = "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/notebooks/Run_notebook_using_papermill/component.yaml" # noqa: E501 catalog_entry_data = {"url": url} # Construct a catalog instance @@ -458,7 +458,7 @@ async def test_parse_components_additional_metatypes(): reader = UrlComponentCatalogConnector(kfp_supported_file_types) # Read contents of given path - url = "https://raw.githubusercontent.com/kubeflow/pipelines/1.4.1/components/keras/Train_classifier/from_CSV/component.yaml" # noqa: E501 + url = "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/keras/Train_classifier/from_CSV/component.yaml" # noqa: E501 catalog_entry_data = {"url": url} # Construct a catalog instance diff --git a/elyra/tests/pipeline/kfp/test_kfp_processor.py b/elyra/tests/pipeline/kfp/test_kfp_processor.py index a86327266..b33fc947c 100644 --- a/elyra/tests/pipeline/kfp/test_kfp_processor.py +++ b/elyra/tests/pipeline/kfp/test_kfp_processor.py @@ -15,37 +15,25 @@ # from datetime import datetime import hashlib -import json import os from pathlib import Path import re from typing import Any from typing import Dict -from kfp.dsl import RUN_ID_PLACEHOLDER import pytest import yaml from elyra.pipeline.catalog_connector import FilesystemComponentCatalogConnector from elyra.pipeline.component import Component -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_DEF_MEDIUM -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_DEF_NAME -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_DEF_SIZE -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_MOUNT_PATH -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_PYTHON_PATH -from elyra.pipeline.kfp.kfp_processor import CRIO_VOL_WORKDIR_PATH from elyra.pipeline.kfp.kfp_processor import KfpPipelineProcessor +from elyra.pipeline.kfp.kfp_processor import RUN_ID_PLACEHOLDER from elyra.pipeline.kfp.kfp_processor import WorkflowEngineType from elyra.pipeline.kfp.kfp_properties import KfpPipelineParameter from elyra.pipeline.pipeline import GenericOperation from elyra.pipeline.pipeline import Operation from elyra.pipeline.pipeline import Pipeline -from elyra.pipeline.pipeline_constants import COS_OBJECT_PREFIX -from elyra.pipeline.pipeline_constants import KUBERNETES_POD_ANNOTATIONS -from elyra.pipeline.pipeline_constants import KUBERNETES_POD_LABELS -from elyra.pipeline.pipeline_constants import KUBERNETES_SECRETS from elyra.pipeline.pipeline_constants import KUBERNETES_SHARED_MEM_SIZE -from elyra.pipeline.pipeline_constants import KUBERNETES_TOLERATIONS from elyra.pipeline.pipeline_constants import MOUNTED_VOLUMES from elyra.pipeline.processor import PipelineProcessor from elyra.pipeline.properties import ComponentProperty @@ -57,7 +45,6 @@ from elyra.pipeline.properties import KubernetesSecret from elyra.pipeline.properties import KubernetesToleration from elyra.pipeline.properties import VolumeMount -from elyra.util.cos import join_paths from elyra.util.kubernetes import sanitize_label_value PIPELINE_FILE_COMPLEX = str((Path("resources") / "sample_pipelines" / "pipeline_dependency_complex.json").as_posix()) @@ -100,10 +87,6 @@ def test_WorkflowEngineType_get_instance_by_value(): assert WorkflowEngineType.get_instance_by_value("ARGO") == WorkflowEngineType.ARGO assert WorkflowEngineType.get_instance_by_value("aRGo") == WorkflowEngineType.ARGO assert WorkflowEngineType.get_instance_by_value("Argo") == WorkflowEngineType.ARGO - assert WorkflowEngineType.get_instance_by_value("tekton") == WorkflowEngineType.TEKTON - assert WorkflowEngineType.get_instance_by_value("TEKTON") == WorkflowEngineType.TEKTON - assert WorkflowEngineType.get_instance_by_value("tEKtOn") == WorkflowEngineType.TEKTON - assert WorkflowEngineType.get_instance_by_value("Tekton") == WorkflowEngineType.TEKTON # test invalid inputs with pytest.raises(KeyError): WorkflowEngineType.get_instance_by_value(None) # there is no default @@ -491,40 +474,8 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_custom_component_pipeline( with open(compiled_argo_output_file) as fh: argo_spec = yaml.safe_load(fh.read()) - assert "argoproj.io/" in argo_spec["apiVersion"] - pipeline_spec_annotations = json.loads(argo_spec["metadata"]["annotations"]["pipelines.kubeflow.org/pipeline_spec"]) - assert ( - pipeline_spec_annotations["name"] == pipeline.name - ), f"DSL input: {generated_argo_dsl}\nArgo output: {argo_spec}" - assert pipeline_spec_annotations["description"] == pipeline.description, pipeline_spec_annotations - - # generate Python DSL for the Tekton workflow engine - generated_tekton_dsl = processor._generate_pipeline_dsl( - pipeline=pipeline, pipeline_name=pipeline.name, workflow_engine=WorkflowEngineType.TEKTON - ) - - assert generated_tekton_dsl is not None - # Generated DSL includes workflow engine specific code in the _main_ function - assert "compiler.TektonCompiler().compile(" in generated_tekton_dsl - - compiled_tekton_output_file = Path(tmpdir) / "compiled_kfp_test_tekton.yaml" - - # if the compiler discovers an issue with the generated DSL this call fails - processor._compile_pipeline_dsl( - dsl=generated_tekton_dsl, - workflow_engine=WorkflowEngineType.TEKTON, - output_file=compiled_tekton_output_file.as_posix(), - pipeline_conf=None, - ) - - # verify that the output file exists - assert compiled_tekton_output_file.is_file() - - # verify the file content - with open(compiled_tekton_output_file) as fh: - tekton_spec = yaml.safe_load(fh.read()) - - assert "tekton.dev/" in tekton_spec["apiVersion"] + assert argo_spec["pipelineInfo"]["name"] == pipeline.name + assert argo_spec["pipelineInfo"]["description"] == pipeline.description @pytest.mark.parametrize( @@ -539,15 +490,6 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_custom_component_pipeline( / "kfp-one-node-generic.pipeline", "workflow_engine": WorkflowEngineType.ARGO, }, - { - "pipeline_file": Path(__file__).parent - / ".." - / "resources" - / "test_pipelines" - / "kfp" - / "kfp-one-node-generic.pipeline", - "workflow_engine": WorkflowEngineType.TEKTON, - }, ], indirect=True, ) @@ -594,16 +536,6 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_workflow_engine_test( experiment_name=experiment_name, ) - # Check the workflow engine specific code in the generated DSL - if workflow_engine == WorkflowEngineType.TEKTON: - assert "from kfp_tekton import compiler" in generated_dsl, f"engine: {workflow_engine}\ndsl: {generated_dsl}" - assert "compiler.TektonCompiler().compile(" in generated_dsl - assert "kfp.compiler.Compiler().compile(" not in generated_dsl - else: - assert "from kfp_tekton import compiler" not in generated_dsl - assert "compiler.TektonCompiler().compile(" not in generated_dsl - assert "kfp.compiler.Compiler().compile(" in generated_dsl - # Compile the generated Python DSL processor._compile_pipeline_dsl( dsl=generated_dsl, @@ -614,13 +546,11 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_workflow_engine_test( # Load compiled workflow with open(compiled_output_file_name) as f: - workflow_spec = yaml.safe_load(f.read()) + workflow_spec_docs = list(yaml.safe_load_all(f.read())) - # Verify that the output is for the specified workflow engine - if workflow_engine == WorkflowEngineType.TEKTON: - assert "tekton.dev/" in workflow_spec["apiVersion"] - else: - assert "argoproj.io/" in workflow_spec["apiVersion"] + assert len(workflow_spec_docs) == 2 + assert "components" in workflow_spec_docs[0] + assert "platforms" in workflow_spec_docs[1] @pytest.mark.parametrize( @@ -634,17 +564,6 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_workflow_engine_test( / "kfp" / "kfp-one-node-generic.pipeline", "workflow_engine": WorkflowEngineType.ARGO, - "use_cos_credentials_secret": True, - }, - { - "pipeline_file": Path(__file__).parent - / ".." - / "resources" - / "test_pipelines" - / "kfp" - / "kfp-one-node-generic.pipeline", - "workflow_engine": WorkflowEngineType.ARGO, - "use_cos_credentials_secret": False, }, ], indirect=True, @@ -672,7 +591,6 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te runtime_config = metadata_dependencies["runtime_config"] assert runtime_config is not None assert runtime_config.name == pipeline.runtime_config - runtime_image_configs = metadata_dependencies["runtime_image_configs"] workflow_engine = WorkflowEngineType.get_instance_by_value(runtime_config.metadata["engine"]) @@ -714,28 +632,26 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te pipeline_conf=None, ) - # Load generated Argo workflow + # Load generated workflow with open(compiled_argo_output_file_name) as f: - argo_spec = yaml.safe_load(f.read()) + spec_docs = list(yaml.safe_load_all(f.read())) - # verify that this is an argo specification - assert "argoproj.io" in argo_spec["apiVersion"] + assert len(spec_docs) == 2 + components, platforms = spec_docs[0], spec_docs[1]["platforms"] - pipeline_meta_annotations = json.loads(argo_spec["metadata"]["annotations"]["pipelines.kubeflow.org/pipeline_spec"]) - assert pipeline_meta_annotations["name"] == pipeline.name - assert pipeline_meta_annotations["description"] == pipeline.description + assert components["pipelineInfo"]["name"] == pipeline.name + assert components["pipelineInfo"]["description"] == pipeline.description # There should be two templates, one for the DAG and one for the generic node. # Locate the one for the generic node and inspect its properties. - assert len(argo_spec["spec"]["templates"]) == 2 - if argo_spec["spec"]["templates"][0]["name"] == argo_spec["spec"]["entrypoint"]: - node_template = argo_spec["spec"]["templates"][1] - else: - node_template = argo_spec["spec"]["templates"][0] + assert components["root"]["dag"] + assert len(components["components"]) == 1 + executors = components["deploymentSpec"]["executors"] + assert len(executors) == 1 + + node_template = components["deploymentSpec"]["executors"]["exec-run-a-file"] # Verify component definition information (see generic_component_definition_template.jinja2) - # - property 'name' - assert node_template["name"] == "run-a-file" # - property 'implementation.container.command' assert node_template["container"]["command"] == ["sh", "-c"] # - property 'implementation.container.args' @@ -749,11 +665,7 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te # - the object storage bucket name that this node uses for file I/O assert f"--cos-bucket '{runtime_config.metadata['cos_bucket']}'" in node_template["container"]["args"][0] # - the directory within that object storage bucket - if pipeline.pipeline_properties.get(COS_OBJECT_PREFIX): - expected_directory_value = join_paths(pipeline.pipeline_properties.get(COS_OBJECT_PREFIX), pipeline_instance_id) - assert f"--cos-directory '{expected_directory_value}' " in node_template["container"]["args"][0] - else: - assert f"--cos-directory '{pipeline_instance_id}" in node_template["container"]["args"][0] + assert f"--cos-directory '{pipeline_instance_id}" in node_template["container"]["args"][0] # - the name of the archive in that directory expected_archive_name = processor._get_dependency_archive_name(op) assert f"--cos-dependencies-archive '{expected_archive_name}' " in node_template["container"]["args"][0] @@ -770,33 +682,26 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te # - property 'implementation.container.image' assert node_template["container"]["image"] == op.runtime_image - # - property 'implementation.container.imagePullPolicy' - # The image pull policy is defined in the the runtime image - # configuration. Look it up and verified it is properly applied. - for runtime_image_config in runtime_image_configs: - if runtime_image_config.metadata["image_name"] == op.runtime_image: - if runtime_image_config.metadata.get("pull_policy"): - assert node_template["container"]["imagePullPolicy"] == runtime_image_config.metadata["pull_policy"] - else: - assert node_template["container"].get("imagePullPolicy") is None - break + + pod_metadata = platforms["kubernetes"]["deploymentSpec"]["executors"]["exec-run-a-file"]["podMetadata"] + assert pod_metadata # Verify Kubernetes labels and annotations that Elyra attaches to pods that # execute generic nodes or custom nodes if op.doc: # only set if a comment is attached to the node - assert node_template["metadata"]["annotations"].get("elyra/node-user-doc") == op.doc + assert pod_metadata["annotations"]["elyra/node-user-doc"] == op.doc # Verify Kubernetes labels and annotations that Elyra attaches to pods that # execute generic nodes - assert node_template["metadata"]["annotations"]["elyra/node-file-name"] == op.filename + assert pod_metadata["annotations"]["elyra/node-file-name"] == op.filename if pipeline.source: - assert node_template["metadata"]["annotations"]["elyra/pipeline-source"] == pipeline.source - assert node_template["metadata"]["labels"]["elyra/node-name"] == sanitize_label_value(op.name) - assert node_template["metadata"]["labels"]["elyra/node-type"] == sanitize_label_value("notebook-script") - assert node_template["metadata"]["labels"]["elyra/pipeline-name"] == sanitize_label_value(pipeline.name) - assert node_template["metadata"]["labels"]["elyra/pipeline-version"] == sanitize_label_value(pipeline_version) - assert node_template["metadata"]["labels"]["elyra/experiment-name"] == sanitize_label_value(experiment_name) + assert pod_metadata["annotations"]["elyra/pipeline-source"] == pipeline.source + assert pod_metadata["labels"]["elyra/node-name"] == sanitize_label_value(op.name) + assert pod_metadata["labels"]["elyra/node-type"] == sanitize_label_value("notebook-script") + assert pod_metadata["labels"]["elyra/pipeline-name"] == sanitize_label_value(pipeline.name) + assert pod_metadata["labels"]["elyra/pipeline-version"] == sanitize_label_value(pipeline_version) + assert pod_metadata["labels"]["elyra/experiment-name"] == sanitize_label_value(experiment_name) # Verify environment variables that Elyra attaches to pods that # execute generic nodes. All values are hard-coded in the template, with the @@ -828,20 +733,6 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te else: assert env_var["value"] == runtime_config.metadata["cos_password"] - # Verify that the mlpipeline specific outputs are declared - assert node_template.get("outputs") is not None, node_template - assert node_template["outputs"]["artifacts"] is not None, node_template["container"]["outputs"] - assert node_template["outputs"]["artifacts"][0]["name"] == "mlpipeline-metrics" - assert ( - node_template["outputs"]["artifacts"][0]["path"] - == (Path(KfpPipelineProcessor.WCD) / "mlpipeline-metrics.json").as_posix() - ) - assert node_template["outputs"]["artifacts"][1]["name"] == "mlpipeline-ui-metadata" - assert ( - node_template["outputs"]["artifacts"][1]["path"] - == (Path(KfpPipelineProcessor.WCD) / "mlpipeline-ui-metadata.json").as_posix() - ) - @pytest.mark.parametrize( "metadata_dependencies", @@ -928,33 +819,34 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_one_generic_node_pipeline_te # Load generated Argo workflow with open(compiled_argo_output_file_name) as f: - argo_spec = yaml.safe_load(f.read()) + spec_docs = list(yaml.safe_load_all(f.read())) + + assert len(spec_docs) == 2 + components = spec_docs[0] - # verify that this is an argo specification - assert "argoproj.io" in argo_spec["apiVersion"] + assert components["pipelineInfo"]["name"] == pipeline.name + assert components["pipelineInfo"]["description"] == pipeline.description # There should be two templates, one for the DAG and one for the generic node. # Locate the one for the generic node and inspect its properties. - assert len(argo_spec["spec"]["templates"]) == 2 - if argo_spec["spec"]["templates"][0]["name"] == argo_spec["spec"]["entrypoint"]: - node_template = argo_spec["spec"]["templates"][1] - else: - node_template = argo_spec["spec"]["templates"][0] + assert components["root"]["dag"] + assert len(components["components"]) == 1 + executors = components["deploymentSpec"]["executors"] + assert len(executors) == 1 op = list(pipeline.operations.values())[0] + node_template = components["deploymentSpec"]["executors"]["exec-run-a-file"] + if op.gpu or op.cpu or op.memory or op.cpu_limit or op.memory_limit: - assert node_template["container"].get("resources") is not None + assert node_template["container"]["resources"] if op.gpu: - assert node_template["container"]["resources"]["limits"]["nvidia.com/gpu"] == str(op.gpu) + assert node_template["container"]["resources"]["accelerator"]["type"] == "nvidia.com/gpu" + assert node_template["container"]["resources"]["accelerator"]["count"] == str(op.gpu) if op.cpu: - assert node_template["container"]["resources"]["requests"]["cpu"] == str(op.cpu) + assert node_template["container"]["resources"]["cpuRequest"] == op.cpu if op.memory: - assert node_template["container"]["resources"]["requests"]["memory"] == f"{op.memory}G" - if op.cpu_limit: - assert node_template["container"]["resources"]["limits"]["cpu"] == str(op.cpu_limit) - if op.memory_limit: - assert node_template["container"]["resources"]["limits"]["memory"] == f"{op.memory_limit}G" + assert node_template["container"]["resources"]["memoryRequest"] == op.memory @pytest.fixture(autouse=False) @@ -973,156 +865,6 @@ def enable_and_disable_crio(request): del os.environ["CRIO_RUNTIME"] -@pytest.mark.parametrize("enable_and_disable_crio", [False, True], indirect=True) -@pytest.mark.parametrize( - "metadata_dependencies", - [ - { - "pipeline_file": Path(__file__).parent - / ".." - / "resources" - / "test_pipelines" - / "kfp" - / "kfp-one-node-generic.pipeline", - "workflow_engine": WorkflowEngineType.ARGO, - }, - ], - indirect=True, -) -def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_component_crio( - monkeypatch, processor: KfpPipelineProcessor, metadata_dependencies: Dict[str, Any], tmpdir, enable_and_disable_crio -): - """ - This test validates that the output of _generate_pipeline_dsl and _compile_pipeline_dsl - yields the expected results for a generic node when the CRIO_RUNTIME environment variable - is set to a valid string representation of the boolean value True (/true/i). - Test assumptions: - - Enabling CRIO_RUNTIME has the same effect for all supported workflow engines - - The test pipeline contains at least one generic node - - With CRIO_RUNTIME enabled, the compiled output must include the following properties: - - in spec.templates[].volumes: - - emptyDir: {medium: '', sizeLimit: 20Gi} - name: workspace - """ - crio_runtime_enabled = os.environ.get("CRIO_RUNTIME", "").lower() == "true" - - # Obtain artifacts from metadata_dependencies fixture - test_pipeline_file = metadata_dependencies["pipeline_file"] - pipeline = metadata_dependencies["pipeline_object"] - assert pipeline is not None - runtime_config = metadata_dependencies["runtime_config"] - assert runtime_config is not None - - workflow_engine = WorkflowEngineType.get_instance_by_value(runtime_config.metadata["engine"]) - - # Mock calls that require access to object storage, because their side effects - # have no bearing on the outcome of this test. - monkeypatch.setattr(processor, "_upload_dependencies_to_object_store", lambda w, x, y, prefix: True) - monkeypatch.setattr(processor, "_verify_cos_connectivity", lambda x: True) - - # Mock pipeline to not include any parameters - monkeypatch.setattr(pipeline, "_pipeline_parameters", ElyraPropertyList([])) - - # Test begins here - - compiled_output_file = Path(tmpdir) / test_pipeline_file.with_suffix(".yaml").name - compiled_output_file_name = str(compiled_output_file.absolute()) - - # generate Python DSL for the specified workflow engine - pipeline_version = f"{pipeline.name}-test-0" - pipeline_instance_id = f"{pipeline.name}-{datetime.now().strftime('%m%d%H%M%S')}" - experiment_name = f"{pipeline.name}-test-0" - - generated_dsl = processor._generate_pipeline_dsl( - pipeline=pipeline, - pipeline_name=pipeline.name, - workflow_engine=workflow_engine, - pipeline_version=pipeline_version, - pipeline_instance_id=pipeline_instance_id, - experiment_name=experiment_name, - ) - - # Compile the DSL - processor._compile_pipeline_dsl( - dsl=generated_dsl, - workflow_engine=workflow_engine, - output_file=compiled_output_file_name, - pipeline_conf=None, - ) - - # Load compiled workflow - with open(compiled_output_file_name) as f: - compiled_spec = yaml.safe_load(f.read()) - - # There should be multiple templates, one for the DAG and one for every generic node. - assert len(compiled_spec["spec"]["templates"]) >= 2 - if crio_runtime_enabled: - for template in compiled_spec["spec"]["templates"]: - if template["name"] == compiled_spec["spec"]["entrypoint"]: - continue - # Check volume definition - assert template.get("volumes") is not None, template - entry_found = False - for volume_entry in template["volumes"]: - if volume_entry["name"] != CRIO_VOL_DEF_NAME: - continue - assert ( - volume_entry.get("emptyDir") is not None - ), f"Unexpected volume entry '{CRIO_VOL_DEF_NAME}': {volume_entry} " - assert volume_entry["emptyDir"]["sizeLimit"] == CRIO_VOL_DEF_SIZE - assert volume_entry["emptyDir"]["medium"] == CRIO_VOL_DEF_MEDIUM - entry_found = True - assert entry_found, f"Missing volume entry '{CRIO_VOL_DEF_NAME}' for CRI-O in {template['volumes']}" - # Check volume mount definition - assert template["container"].get("volumeMounts") is not None, template["container"] - for volumemount_entry in template["container"]["volumeMounts"]: - entry_found = False - if volumemount_entry["name"] != CRIO_VOL_DEF_NAME: - continue - assert volumemount_entry["mountPath"] == CRIO_VOL_MOUNT_PATH - entry_found = True - break - assert ( - entry_found - ), f"Missing volume mount entry '{CRIO_VOL_DEF_NAME}' for CRI-O in {template['container']['volumeMounts']}" - # Check PYTHONPATH environment variable (python_user_lib_path) - assert template["container"].get("env") is not None, template["container"] - for env_entry in template["container"]["env"]: - entry_found = False - if env_entry["name"] != "PYTHONPATH": - continue - assert env_entry["value"] == CRIO_VOL_PYTHON_PATH - entry_found = True - break - assert entry_found, f"Missing env variable entry 'PYTHONPATH' for CRI-O in {template['container']['env']}" - # Check the container command argument list - assert len(template["container"]["args"]) == 1 - assert f"mkdir -p {CRIO_VOL_WORKDIR_PATH}" in template["container"]["args"][0] - assert f"--target={CRIO_VOL_PYTHON_PATH}" in template["container"]["args"][0] - assert f"--user-volume-path '{CRIO_VOL_PYTHON_PATH}' " in template["container"]["args"][0] - else: - for template in compiled_spec["spec"]["templates"]: - if template["name"] == compiled_spec["spec"]["entrypoint"]: - continue - # Check if a volume was defined - for volume_entry in template.get("volumes", []): - if volume_entry["name"] == CRIO_VOL_DEF_NAME: - # if a volume with the 'reserved' name exist there could be a problem - assert volume_entry.get("emptyDir") is None - # Check volume mount definition - for volumemount_entry in template["container"].get("volumeMounts", []): - if volumemount_entry["name"] == CRIO_VOL_DEF_NAME: - assert volumemount_entry["mountPath"] != CRIO_VOL_MOUNT_PATH - # Check PYTHONPATH environment variable - for env_entry in template["container"].get("env", []): - assert env_entry["name"] != "PYTHONPATH" - # Check the container command argument list - assert "mkdir -p ./jupyter-work-dir" in template["container"]["args"][0] - assert f"--target={CRIO_VOL_PYTHON_PATH}" not in template["container"]["args"][0] - assert "--user-volume-path" not in template["container"]["args"][0] - - @pytest.mark.parametrize( "metadata_dependencies", [ @@ -1138,6 +880,10 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_component_crio( ], indirect=True, ) +@pytest.mark.skip( + reason="This test is not compatible with KFP v2 as the generated YAML is ignoring \ + attributes from the source pipeline file" +) def test_generate_pipeline_dsl_compile_pipeline_dsl_optional_elyra_properties( monkeypatch, processor: KfpPipelineProcessor, metadata_dependencies: Dict[str, Any], tmpdir ): @@ -1201,76 +947,65 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_optional_elyra_properties( # Load compiled output with open(compiled_output_file_name) as fh: - compiled_spec = yaml.safe_load(fh.read()) + spec_docs = list(yaml.safe_load_all(fh.read())) - # There should be two templates, one for the DAG and one for the generic node. - # Locate the one for the generic node and inspect its properties. - assert len(compiled_spec["spec"]["templates"]) == 2 - if compiled_spec["spec"]["templates"][0]["name"] == compiled_spec["spec"]["entrypoint"]: - node_template = compiled_spec["spec"]["templates"][1] - else: - node_template = compiled_spec["spec"]["templates"][0] + assert len(spec_docs) == 2 + assert "components" in spec_docs[0] + assert "platforms" in spec_docs[1] # # validate data volumes, if applicable expected_volume_mounts = op.elyra_props.get(MOUNTED_VOLUMES) if len(expected_volume_mounts) > 0: # There must be one or more 'volumeMounts' entry and one or more 'volumes' entry - assert node_template["container"].get("volumeMounts") is not None, node_template["container"] - assert node_template.get("volumes") is not None, compiled_spec["spec"] + assert ( + spec_docs[1]["platforms"]["kubernetes"]["deploymentSpec"]["executors"]["exec-run-a-file"].get("pvcMount") + is not None + ), spec_docs[1]["platforms"]["kubernetes"] + pvc_mounts = spec_docs[1]["platforms"]["kubernetes"]["deploymentSpec"]["executors"]["exec-run-a-file"][ + "pvcMount" + ] - assert len(node_template["container"]["volumeMounts"]) >= len(expected_volume_mounts) + assert len(pvc_mounts) >= len(expected_volume_mounts) for volume_mount in expected_volume_mounts: - for volumemount_entry in node_template["container"]["volumeMounts"]: + for volumemount_entry in pvc_mounts: entry_found = False if volumemount_entry["mountPath"] == volume_mount.path: - assert volumemount_entry["name"] == volume_mount.pvc_name - assert volumemount_entry.get("subPath", None) == volume_mount.sub_path - assert volumemount_entry.get("readOnly", None) == volume_mount.read_only + assert volumemount_entry["constant"] == volume_mount.pvc_name + # the following attributes are currently ignored in KFP v2. + # Once they are implemented, the code below needs to be updated accordingly. + # Reference: https://github.com/kubeflow/pipelines/blob/master/ + # kubernetes_platform/proto/kubernetes_executor_config.proto#L84 + # + # assert volumemount_entry.get("subPath", None) == volume_mount.sub_path + # assert volumemount_entry.get("readOnly", False) == volume_mount.read_only entry_found = True break - assert ( - entry_found - ), f"Cannot find volume mount entry '{volume_mount.path}' in {node_template['container']['volumeMounts']}" - for volume_entry in node_template["volumes"]: - entry_found = False - if volume_entry["name"] == volume_mount.pvc_name: - assert volume_entry["persistentVolumeClaim"]["claimName"] == volume_mount.pvc_name - entry_found = True - break - assert ( - entry_found - ), f"Cannot find volume entry '{volume_mount.path}' in {node_template['container']['volumeMounts']}" + assert entry_found, f"Cannot find volume mount entry '{volume_mount.path}' in {pvc_mounts}" # # validate custom shared memory size, if applicable custom_shared_mem_size = op.elyra_props.get(KUBERNETES_SHARED_MEM_SIZE) if custom_shared_mem_size: # There must be one 'volumeMounts' entry and one 'volumes' entry - assert node_template["container"].get("volumeMounts") is not None, node_template["container"] - assert node_template.get("volumes") is not None, compiled_spec["spec"] - for volumemount_entry in node_template["container"]["volumeMounts"]: + assert ( + spec_docs[1]["platforms"]["kubernetes"]["deploymentSpec"]["executors"]["exec-run-a-file"].get("pvcMount") + is not None + ), spec_docs[1]["platforms"]["kubernetes"] + pvc_mounts = spec_docs[1]["platforms"]["kubernetes"]["deploymentSpec"]["executors"]["exec-run-a-file"][ + "pvcMount" + ] + + for volumemount_entry in pvc_mounts: entry_found = False if volumemount_entry["mountPath"] == "/dev/shm": assert volumemount_entry["name"] == "shm" entry_found = True break - assert ( - entry_found - ), "Missing volume mount entry for shared memory size in {node_template['container']['volumeMounts']}" - for volume_entry in node_template["volumes"]: - entry_found = False - if volume_entry["name"] == "shm": - assert volume_entry["emptyDir"]["medium"] == "Memory" - assert ( - volume_entry["emptyDir"]["sizeLimit"] - == f"{custom_shared_mem_size.size}{custom_shared_mem_size.units}" - ) - entry_found = True - break - assert ( - entry_found - ), f"Missing volume entry for shm size '{volume_mount.path}' in {node_template['container']['volumeMounts']}" + assert entry_found, "Missing volume mount entry for shared memory size in {pvc_mounts}" + + """ + IMPORTANT: TODO: The following code needs to be updated to the KFP v2 once the feature is implemented. # # validate Kubernetes secrets, if applicable @@ -1336,6 +1071,7 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_optional_elyra_properties( f"in {node_template['tolerations']}" ) assert entry_found, not_found_msg + """ @pytest.mark.parametrize( @@ -1411,20 +1147,23 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_components_data_exch # Load compiled output with open(compiled_output_file_name) as fh: - compiled_spec = yaml.safe_load(fh.read()) + compiled_spec_docs = list(yaml.safe_load_all(fh.read())) + + assert len(compiled_spec_docs) == 2 + assert "components" in compiled_spec_docs[0] + assert "platforms" in compiled_spec_docs[1] # There should be at least four templates, one for the DAG and three # for generic nodes. Each template spec for generic nodes is named # "run-a-file[-index]". The "-index" is added by the compiler to # guarantee uniqueness. - assert len(compiled_spec["spec"]["templates"]) >= 3 + executors = compiled_spec_docs[0]["deploymentSpec"]["executors"] + assert len(executors) >= 3 template_specs = {} - for node_template in compiled_spec["spec"]["templates"]: - if node_template["name"] == compiled_spec["spec"]["entrypoint"] or not node_template["name"].startswith( - "run-a-file" - ): + for node_template_name, node_template in executors.items(): + if node_template_name == executors or not node_template_name.startswith("exec-run-a-file"): continue - template_specs[node_template["name"]] = node_template + template_specs[node_template_name] = node_template # Iterate through sorted operations and verify that their inputs # and outputs are properly represented in their respective template @@ -1435,9 +1174,9 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_components_data_exch # ignore custom nodes continue if template_index == 1: - template_name = "run-a-file" + template_name = "exec-run-a-file" else: - template_name = f"run-a-file-{template_index}" + template_name = f"exec-run-a-file-{template_index}" template_index = template_index + 1 # compare outputs if len(op.outputs) > 0: @@ -1477,6 +1216,9 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_components_data_exch ], indirect=True, ) +@pytest.mark.skip( + reason="This test is not compatible with KFP v2: There is no `imagePullSecrets` in the generated YAML to be verified." # noqa: E501 +) def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_components_pipeline_conf( monkeypatch, processor: KfpPipelineProcessor, metadata_dependencies: Dict[str, Any], tmpdir ): @@ -1631,24 +1373,20 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_generic_components_with_para # Load compiled workflow with open(compiled_output_file_name) as f: - compiled_spec = yaml.safe_load(f.read()) + compiled_spec_docs = list(yaml.safe_load_all(f.read())) + + assert len(compiled_spec_docs) == 2 + assert "components" in compiled_spec_docs[0] + assert "platforms" in compiled_spec_docs[1] # Test parameters appear as expected - yaml_pipeline_params = compiled_spec["spec"]["arguments"]["parameters"] + yaml_pipeline_params = compiled_spec_docs[0]["root"]["inputDefinitions"]["parameters"] # Only two parameters are referenced by a node in the pipeline, so only 2 should be present in YAML assert len(yaml_pipeline_params) == 2 # Assert params defined in YAML correspond to those defined by the Pipeline object - for param_from_yaml in yaml_pipeline_params: - param_name, param_value = param_from_yaml.get("name"), param_from_yaml.get("value") - assert any(param.name == param_name and str(param.value) == param_value for param in pipeline.parameters) - - yaml_node_params = compiled_spec["spec"]["templates"][1]["inputs"]["parameters"] - # Only two parameters are referenced by this node, so only 2 should be present as inputs - assert len(yaml_node_params) == 2 - # Assert params defined in YAML correspond to those defined by the Pipeline object - for param_from_yaml in yaml_node_params: - param_name = param_from_yaml.get("name") - assert any(param.name == param_name for param in pipeline.parameters) + for param_name, param_info in yaml_pipeline_params.items(): + param_value = param_info["defaultValue"] + assert any(param.name == param_name and param.value == param_value for param in pipeline.parameters) def test_generate_pipeline_dsl_compile_pipeline_dsl_custom_components_with_parameters( @@ -1776,21 +1514,15 @@ def test_generate_pipeline_dsl_compile_pipeline_dsl_custom_components_with_param print(compiled_spec) # Test parameters appear as expected - yaml_pipeline_params = compiled_spec["spec"]["arguments"]["parameters"] + yaml_pipeline_params = compiled_spec["root"]["inputDefinitions"]["parameters"] # Only two parameters are referenced by a node in the pipeline, so only 1 should be present in YAML assert len(yaml_pipeline_params) == 1 # Assert params defined in YAML correspond to those defined by the Pipeline object - for param_from_yaml in yaml_pipeline_params: - param_name, param_value = param_from_yaml.get("name"), param_from_yaml.get("value") - assert any(param.name == param_name and str(param.value) == param_value for param in pipeline.parameters) - - yaml_node_params = compiled_spec["spec"]["templates"][0]["inputs"]["parameters"] - # Only two parameters are referenced by this node, so only 1 should be present as input - assert len(yaml_node_params) == 1 - # Assert params defined in YAML correspond to those defined by the Pipeline object - for param_from_yaml in yaml_node_params: - param_name = param_from_yaml.get("name") - assert any(param.name == param_name for param in pipeline.parameters) + for param_from_yaml_key, param_from_yaml_value in yaml_pipeline_params.items(): + assert any( + param.name == param_from_yaml_key and str(param.value) == param_from_yaml_value["defaultValue"] + for param in pipeline.parameters + ) def test_kfp_invalid_pipeline_parameter_type(): diff --git a/elyra/tests/pipeline/resources/components/filter_text.yaml b/elyra/tests/pipeline/resources/components/filter_text.yaml index 2f92f5abd..7130f0856 100644 --- a/elyra/tests/pipeline/resources/components/filter_text.yaml +++ b/elyra/tests/pipeline/resources/components/filter_text.yaml @@ -12,30 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Component source location: https://raw.githubusercontent.com/kubeflow/pipelines/master/components/sample/Shell_script/component.yaml +# Component source location: https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/sample/Shell_script/component.yaml # Component details: Takes a text file and a regex pattern filter to produce a filtered text file -name: Filter text +name: Filter text using shell and grep inputs: -- {name: Text, optional: false, description: 'Path to file to be filtered'} -- {name: Pattern, optional: true, default: '.*', description: 'Regex pattern'} + - { name: Text, type: String } + - { name: Pattern, default: ".*", type: String } outputs: -- {name: Filtered text} + - { name: Filtered text, type: String } metadata: annotations: author: Alexey Volkov + canonical_location: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/master/components/sample/Shell_script/component.yaml" implementation: container: image: alpine command: - - sh - - -ec - - | - text_path=$0 - pattern=$1 - filtered_text_path=$2 - mkdir -p "$(dirname "$filtered_text_path")" + - sh + - -ec + - | + text_path=$0 + pattern=$1 + filtered_text_path=$2 + mkdir -p "$(dirname "$filtered_text_path")" - grep "$pattern" < "$text_path" > "$filtered_text_path" - - {inputPath: Text} - - {inputValue: Pattern} - - {outputPath: Filtered text} + grep "$pattern" < "$text_path" > "$filtered_text_path" diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_inputpath_parameter.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_inputpath_parameter.pipeline index c9b3982dc..6da18caf1 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_inputpath_parameter.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_inputpath_parameter.pipeline @@ -16,7 +16,7 @@ "component_parameters": { "url": { "widget": "string", - "value": "https://raw.githubusercontent.com/kubeflow/pipelines/93fc34474bf989998cf19445149aca2847eee763/components/notebooks/samples/test_notebook.ipynb" + "value": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/notebooks/samples/test_notebook.ipynb" }, "curl_options": { "widget": "string", @@ -25,7 +25,7 @@ "output_data": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "/static/elyra/kubeflow.svg", @@ -156,7 +156,7 @@ "output_hash": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/basics/Calculate_hash/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/basics/Calculate_hash/component.yaml", "ui_data": { "label": "Calculate data hash", "image": "/static/elyra/kubeflow.svg", diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_missing_connection.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_missing_connection.pipeline index 3082695ff..3e1466127 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_missing_connection.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_missing_connection.pipeline @@ -91,7 +91,7 @@ } }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "/static/elyra/kubeflow.svg", @@ -148,7 +148,7 @@ } }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/basics/Calculate_hash/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/basics/Calculate_hash/component.yaml", "ui_data": { "label": "Calculate data hash", "image": "/static/elyra/kubeflow.svg", diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_parameter.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_parameter.pipeline index 2a7f1bde2..e8cc85642 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_parameter.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_invalid_inputpath_parameter.pipeline @@ -16,7 +16,7 @@ "component_parameters": { "url": { "widget": "string", - "value": "https://raw.githubusercontent.com/kubeflow/pipelines/93fc34474bf989998cf19445149aca2847eee763/components/notebooks/samples/test_notebook.ipynb" + "value": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/notebooks/samples/test_notebook.ipynb" }, "curl_options": { "widget": "string", @@ -25,7 +25,7 @@ "output_data": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "/static/elyra/kubeflow.svg", @@ -155,7 +155,7 @@ "output_hash": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/basics/Calculate_hash/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/basics/Calculate_hash/component.yaml", "ui_data": { "label": "Calculate data hash", "image": "/static/elyra/kubeflow.svg", diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_invalid_single_cycle.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_invalid_single_cycle.pipeline index 08b0febac..8614dc6a0 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_invalid_single_cycle.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_invalid_single_cycle.pipeline @@ -19,7 +19,7 @@ "output_data": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20viewBox%3D%220%200%20276.93%20274.55%22%3E%3Cg%20id%3D%22Layer_2%22%20data-name%3D%22Layer%202%22%3E%3Cg%20id%3D%22Layer_1-2%22%20data-name%3D%22Layer%201%22%3E%3Cpath%20d%3D%22M95.9%2C62.15%2C100%2C164.25l73.75-94.12a6.79%2C6.79%2C0%2C0%2C1%2C9.6-1.11l46%2C36.92-15-65.61Z%22%20fill%3D%22%234279f4%22%2F%3E%3Cpolygon%20points%3D%22102.55%20182.98%20167.97%20182.98%20127.8%20150.75%20102.55%20182.98%22%20fill%3D%22%230028aa%22%2F%3E%3Cpolygon%20points%3D%22180.18%2083.92%20136.18%20140.06%20183.06%20177.67%20227.53%20121.91%20180.18%2083.92%22%20fill%3D%22%23014bd1%22%2F%3E%3Cpolygon%20points%3D%2283.56%2052.3%2083.57%2052.29%20122.26%203.77%2059.87%2033.82%2044.46%20101.33%2083.56%2052.3%22%20fill%3D%22%23bedcff%22%2F%3E%3Cpolygon%20points%3D%2245.32%20122.05%2086.76%20174.01%2082.81%2075.03%2045.32%20122.05%22%20fill%3D%22%236ca1ff%22%2F%3E%3Cpolygon%20points%3D%22202.31%2028.73%20142.65%200%20105.52%2046.56%20202.31%2028.73%22%20fill%3D%22%23a1c3ff%22%2F%3E%3Cpath%20d%3D%22M1.6%2C272V227.22H7.34v23.41l20.48-23.41h6.4l-17.39%2C19.7%2C19%2C25.07H29.1l-15.92-20.8-5.84%2C6.65V272Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M41.62%2C262.21V240h5.43v22.39a4.67%2C4.67%2C0%2C0%2C0%2C2.35%2C4.19%2C11%2C11%2C0%2C0%2C0%2C11%2C0%2C4.69%2C4.69%2C0%2C0%2C0%2C2.33-4.19V240h5.43v22.19a9.08%2C9.08%2C0%2C0%2C1-4.1%2C7.87%2C16.2%2C16.2%2C0%2C0%2C1-18.37%2C0A9.07%2C9.07%2C0%2C0%2C1%2C41.62%2C262.21Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M77.46%2C272V224h5.43v16.81a29.29%2C29.29%2C0%2C0%2C1%2C9.32-1.73%2C13.1%2C13.1%2C0%2C0%2C1%2C6.2%2C1.41%2C10.71%2C10.71%2C0%2C0%2C1%2C4.18%2C3.74%2C18.07%2C18.07%2C0%2C0%2C1%2C2.23%2C5.06%2C21.26%2C21.26%2C0%2C0%2C1%2C.73%2C5.58q0%2C8.43-4.38%2C12.79T87.35%2C272Zm5.43-4.87h4.55q6.77%2C0%2C9.72-2.95t3-9.51a14.21%2C14.21%2C0%2C0%2C0-2-7.52%2C6.55%2C6.55%2C0%2C0%2C0-6-3.22%2C24.73%2C24.73%2C0%2C0%2C0-9.25%2C1.54Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M112.36%2C255.94q0-7.71%2C4.09-12.3a13.75%2C13.75%2C0%2C0%2C1%2C10.8-4.59q13.35%2C0%2C13.36%2C18.86H117.79a12.3%2C12.3%2C0%2C0%2C0%2C2.9%2C7.07q2.59%2C3.11%2C7.9%2C3.1a24.92%2C24.92%2C0%2C0%2C0%2C10.55-2v5a27.74%2C27.74%2C0%2C0%2C1-9.86%2C1.87%2C19.83%2C19.83%2C0%2C0%2C1-7.7-1.37%2C13.31%2C13.31%2C0%2C0%2C1-5.28-3.76%2C16.21%2C16.21%2C0%2C0%2C1-3-5.38A20.84%2C20.84%2C0%2C0%2C1%2C112.36%2C255.94Zm5.62-2.12h17.26a14.91%2C14.91%2C0%2C0%2C0-2.37-7.12%2C6.44%2C6.44%2C0%2C0%2C0-5.62-2.78%2C8.2%2C8.2%2C0%2C0%2C0-6.21%2C2.72A12.07%2C12.07%2C0%2C0%2C0%2C118%2C253.82Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M147.32%2C244.89V240h5v-7.59a8.14%2C8.14%2C0%2C0%2C1%2C2.31-6.05%2C7.79%2C7.79%2C0%2C0%2C1%2C5.69-2.28h7.86V229h-5c-2.21%2C0-3.67.45-4.37%2C1.34s-1.06%2C2.55-1.06%2C5V240h8.46v4.87h-8.46V272h-5.44v-27.1Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M175.26%2C272V224h5.43v48Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M194.41%2C268.05a17.86%2C17.86%2C0%2C1%2C1%2C12.33%2C4.9A16.57%2C16.57%2C0%2C0%2C1%2C194.41%2C268.05Zm3.84-20.65a13.16%2C13.16%2C0%2C0%2C0%2C0%2C17.2%2C12.07%2C12.07%2C0%2C0%2C0%2C17%2C0%2C13.09%2C13.09%2C0%2C0%2C0%2C0-17.2%2C12.07%2C12.07%2C0%2C0%2C0-17%2C0Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M228.45%2C240h5.75l7.3%2C25.32L248.93%2C240h5.36l7.34%2C25.34L269%2C240h5.74L264.7%2C272h-6.12l-6.83-24.58L245%2C272h-6.47Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3C%2Fg%3E%3C%2Fg%3E%3C%2Fsvg%3E", @@ -277,7 +277,7 @@ } }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/basics/Calculate_hash/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/basics/Calculate_hash/component.yaml", "ui_data": { "label": "Calculate data hash", "image": "data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20viewBox%3D%220%200%20276.93%20274.55%22%3E%3Cg%20id%3D%22Layer_2%22%20data-name%3D%22Layer%202%22%3E%3Cg%20id%3D%22Layer_1-2%22%20data-name%3D%22Layer%201%22%3E%3Cpath%20d%3D%22M95.9%2C62.15%2C100%2C164.25l73.75-94.12a6.79%2C6.79%2C0%2C0%2C1%2C9.6-1.11l46%2C36.92-15-65.61Z%22%20fill%3D%22%234279f4%22%2F%3E%3Cpolygon%20points%3D%22102.55%20182.98%20167.97%20182.98%20127.8%20150.75%20102.55%20182.98%22%20fill%3D%22%230028aa%22%2F%3E%3Cpolygon%20points%3D%22180.18%2083.92%20136.18%20140.06%20183.06%20177.67%20227.53%20121.91%20180.18%2083.92%22%20fill%3D%22%23014bd1%22%2F%3E%3Cpolygon%20points%3D%2283.56%2052.3%2083.57%2052.29%20122.26%203.77%2059.87%2033.82%2044.46%20101.33%2083.56%2052.3%22%20fill%3D%22%23bedcff%22%2F%3E%3Cpolygon%20points%3D%2245.32%20122.05%2086.76%20174.01%2082.81%2075.03%2045.32%20122.05%22%20fill%3D%22%236ca1ff%22%2F%3E%3Cpolygon%20points%3D%22202.31%2028.73%20142.65%200%20105.52%2046.56%20202.31%2028.73%22%20fill%3D%22%23a1c3ff%22%2F%3E%3Cpath%20d%3D%22M1.6%2C272V227.22H7.34v23.41l20.48-23.41h6.4l-17.39%2C19.7%2C19%2C25.07H29.1l-15.92-20.8-5.84%2C6.65V272Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M41.62%2C262.21V240h5.43v22.39a4.67%2C4.67%2C0%2C0%2C0%2C2.35%2C4.19%2C11%2C11%2C0%2C0%2C0%2C11%2C0%2C4.69%2C4.69%2C0%2C0%2C0%2C2.33-4.19V240h5.43v22.19a9.08%2C9.08%2C0%2C0%2C1-4.1%2C7.87%2C16.2%2C16.2%2C0%2C0%2C1-18.37%2C0A9.07%2C9.07%2C0%2C0%2C1%2C41.62%2C262.21Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M77.46%2C272V224h5.43v16.81a29.29%2C29.29%2C0%2C0%2C1%2C9.32-1.73%2C13.1%2C13.1%2C0%2C0%2C1%2C6.2%2C1.41%2C10.71%2C10.71%2C0%2C0%2C1%2C4.18%2C3.74%2C18.07%2C18.07%2C0%2C0%2C1%2C2.23%2C5.06%2C21.26%2C21.26%2C0%2C0%2C1%2C.73%2C5.58q0%2C8.43-4.38%2C12.79T87.35%2C272Zm5.43-4.87h4.55q6.77%2C0%2C9.72-2.95t3-9.51a14.21%2C14.21%2C0%2C0%2C0-2-7.52%2C6.55%2C6.55%2C0%2C0%2C0-6-3.22%2C24.73%2C24.73%2C0%2C0%2C0-9.25%2C1.54Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M112.36%2C255.94q0-7.71%2C4.09-12.3a13.75%2C13.75%2C0%2C0%2C1%2C10.8-4.59q13.35%2C0%2C13.36%2C18.86H117.79a12.3%2C12.3%2C0%2C0%2C0%2C2.9%2C7.07q2.59%2C3.11%2C7.9%2C3.1a24.92%2C24.92%2C0%2C0%2C0%2C10.55-2v5a27.74%2C27.74%2C0%2C0%2C1-9.86%2C1.87%2C19.83%2C19.83%2C0%2C0%2C1-7.7-1.37%2C13.31%2C13.31%2C0%2C0%2C1-5.28-3.76%2C16.21%2C16.21%2C0%2C0%2C1-3-5.38A20.84%2C20.84%2C0%2C0%2C1%2C112.36%2C255.94Zm5.62-2.12h17.26a14.91%2C14.91%2C0%2C0%2C0-2.37-7.12%2C6.44%2C6.44%2C0%2C0%2C0-5.62-2.78%2C8.2%2C8.2%2C0%2C0%2C0-6.21%2C2.72A12.07%2C12.07%2C0%2C0%2C0%2C118%2C253.82Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M147.32%2C244.89V240h5v-7.59a8.14%2C8.14%2C0%2C0%2C1%2C2.31-6.05%2C7.79%2C7.79%2C0%2C0%2C1%2C5.69-2.28h7.86V229h-5c-2.21%2C0-3.67.45-4.37%2C1.34s-1.06%2C2.55-1.06%2C5V240h8.46v4.87h-8.46V272h-5.44v-27.1Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M175.26%2C272V224h5.43v48Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M194.41%2C268.05a17.86%2C17.86%2C0%2C1%2C1%2C12.33%2C4.9A16.57%2C16.57%2C0%2C0%2C1%2C194.41%2C268.05Zm3.84-20.65a13.16%2C13.16%2C0%2C0%2C0%2C0%2C17.2%2C12.07%2C12.07%2C0%2C0%2C0%2C17%2C0%2C13.09%2C13.09%2C0%2C0%2C0%2C0-17.2%2C12.07%2C12.07%2C0%2C0%2C0-17%2C0Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M228.45%2C240h5.75l7.3%2C25.32L248.93%2C240h5.36l7.34%2C25.34L269%2C240h5.74L264.7%2C272h-6.12l-6.83-24.58L245%2C272h-6.47Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3C%2Fg%3E%3C%2Fg%3E%3C%2Fsvg%3E", diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_valid.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_valid.pipeline index 3429ecb4b..0240f8ea7 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_valid.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_supernode_valid.pipeline @@ -19,7 +19,7 @@ "output_data": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20viewBox%3D%220%200%20276.93%20274.55%22%3E%3Cg%20id%3D%22Layer_2%22%20data-name%3D%22Layer%202%22%3E%3Cg%20id%3D%22Layer_1-2%22%20data-name%3D%22Layer%201%22%3E%3Cpath%20d%3D%22M95.9%2C62.15%2C100%2C164.25l73.75-94.12a6.79%2C6.79%2C0%2C0%2C1%2C9.6-1.11l46%2C36.92-15-65.61Z%22%20fill%3D%22%234279f4%22%2F%3E%3Cpolygon%20points%3D%22102.55%20182.98%20167.97%20182.98%20127.8%20150.75%20102.55%20182.98%22%20fill%3D%22%230028aa%22%2F%3E%3Cpolygon%20points%3D%22180.18%2083.92%20136.18%20140.06%20183.06%20177.67%20227.53%20121.91%20180.18%2083.92%22%20fill%3D%22%23014bd1%22%2F%3E%3Cpolygon%20points%3D%2283.56%2052.3%2083.57%2052.29%20122.26%203.77%2059.87%2033.82%2044.46%20101.33%2083.56%2052.3%22%20fill%3D%22%23bedcff%22%2F%3E%3Cpolygon%20points%3D%2245.32%20122.05%2086.76%20174.01%2082.81%2075.03%2045.32%20122.05%22%20fill%3D%22%236ca1ff%22%2F%3E%3Cpolygon%20points%3D%22202.31%2028.73%20142.65%200%20105.52%2046.56%20202.31%2028.73%22%20fill%3D%22%23a1c3ff%22%2F%3E%3Cpath%20d%3D%22M1.6%2C272V227.22H7.34v23.41l20.48-23.41h6.4l-17.39%2C19.7%2C19%2C25.07H29.1l-15.92-20.8-5.84%2C6.65V272Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M41.62%2C262.21V240h5.43v22.39a4.67%2C4.67%2C0%2C0%2C0%2C2.35%2C4.19%2C11%2C11%2C0%2C0%2C0%2C11%2C0%2C4.69%2C4.69%2C0%2C0%2C0%2C2.33-4.19V240h5.43v22.19a9.08%2C9.08%2C0%2C0%2C1-4.1%2C7.87%2C16.2%2C16.2%2C0%2C0%2C1-18.37%2C0A9.07%2C9.07%2C0%2C0%2C1%2C41.62%2C262.21Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M77.46%2C272V224h5.43v16.81a29.29%2C29.29%2C0%2C0%2C1%2C9.32-1.73%2C13.1%2C13.1%2C0%2C0%2C1%2C6.2%2C1.41%2C10.71%2C10.71%2C0%2C0%2C1%2C4.18%2C3.74%2C18.07%2C18.07%2C0%2C0%2C1%2C2.23%2C5.06%2C21.26%2C21.26%2C0%2C0%2C1%2C.73%2C5.58q0%2C8.43-4.38%2C12.79T87.35%2C272Zm5.43-4.87h4.55q6.77%2C0%2C9.72-2.95t3-9.51a14.21%2C14.21%2C0%2C0%2C0-2-7.52%2C6.55%2C6.55%2C0%2C0%2C0-6-3.22%2C24.73%2C24.73%2C0%2C0%2C0-9.25%2C1.54Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M112.36%2C255.94q0-7.71%2C4.09-12.3a13.75%2C13.75%2C0%2C0%2C1%2C10.8-4.59q13.35%2C0%2C13.36%2C18.86H117.79a12.3%2C12.3%2C0%2C0%2C0%2C2.9%2C7.07q2.59%2C3.11%2C7.9%2C3.1a24.92%2C24.92%2C0%2C0%2C0%2C10.55-2v5a27.74%2C27.74%2C0%2C0%2C1-9.86%2C1.87%2C19.83%2C19.83%2C0%2C0%2C1-7.7-1.37%2C13.31%2C13.31%2C0%2C0%2C1-5.28-3.76%2C16.21%2C16.21%2C0%2C0%2C1-3-5.38A20.84%2C20.84%2C0%2C0%2C1%2C112.36%2C255.94Zm5.62-2.12h17.26a14.91%2C14.91%2C0%2C0%2C0-2.37-7.12%2C6.44%2C6.44%2C0%2C0%2C0-5.62-2.78%2C8.2%2C8.2%2C0%2C0%2C0-6.21%2C2.72A12.07%2C12.07%2C0%2C0%2C0%2C118%2C253.82Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M147.32%2C244.89V240h5v-7.59a8.14%2C8.14%2C0%2C0%2C1%2C2.31-6.05%2C7.79%2C7.79%2C0%2C0%2C1%2C5.69-2.28h7.86V229h-5c-2.21%2C0-3.67.45-4.37%2C1.34s-1.06%2C2.55-1.06%2C5V240h8.46v4.87h-8.46V272h-5.44v-27.1Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M175.26%2C272V224h5.43v48Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M194.41%2C268.05a17.86%2C17.86%2C0%2C1%2C1%2C12.33%2C4.9A16.57%2C16.57%2C0%2C0%2C1%2C194.41%2C268.05Zm3.84-20.65a13.16%2C13.16%2C0%2C0%2C0%2C0%2C17.2%2C12.07%2C12.07%2C0%2C0%2C0%2C17%2C0%2C13.09%2C13.09%2C0%2C0%2C0%2C0-17.2%2C12.07%2C12.07%2C0%2C0%2C0-17%2C0Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M228.45%2C240h5.75l7.3%2C25.32L248.93%2C240h5.36l7.34%2C25.34L269%2C240h5.74L264.7%2C272h-6.12l-6.83-24.58L245%2C272h-6.47Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3C%2Fg%3E%3C%2Fg%3E%3C%2Fsvg%3E", @@ -263,7 +263,7 @@ } }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/basics/Calculate_hash/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/basics/Calculate_hash/component.yaml", "ui_data": { "label": "Calculate data hash", "image": "data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%20viewBox%3D%220%200%20276.93%20274.55%22%3E%3Cg%20id%3D%22Layer_2%22%20data-name%3D%22Layer%202%22%3E%3Cg%20id%3D%22Layer_1-2%22%20data-name%3D%22Layer%201%22%3E%3Cpath%20d%3D%22M95.9%2C62.15%2C100%2C164.25l73.75-94.12a6.79%2C6.79%2C0%2C0%2C1%2C9.6-1.11l46%2C36.92-15-65.61Z%22%20fill%3D%22%234279f4%22%2F%3E%3Cpolygon%20points%3D%22102.55%20182.98%20167.97%20182.98%20127.8%20150.75%20102.55%20182.98%22%20fill%3D%22%230028aa%22%2F%3E%3Cpolygon%20points%3D%22180.18%2083.92%20136.18%20140.06%20183.06%20177.67%20227.53%20121.91%20180.18%2083.92%22%20fill%3D%22%23014bd1%22%2F%3E%3Cpolygon%20points%3D%2283.56%2052.3%2083.57%2052.29%20122.26%203.77%2059.87%2033.82%2044.46%20101.33%2083.56%2052.3%22%20fill%3D%22%23bedcff%22%2F%3E%3Cpolygon%20points%3D%2245.32%20122.05%2086.76%20174.01%2082.81%2075.03%2045.32%20122.05%22%20fill%3D%22%236ca1ff%22%2F%3E%3Cpolygon%20points%3D%22202.31%2028.73%20142.65%200%20105.52%2046.56%20202.31%2028.73%22%20fill%3D%22%23a1c3ff%22%2F%3E%3Cpath%20d%3D%22M1.6%2C272V227.22H7.34v23.41l20.48-23.41h6.4l-17.39%2C19.7%2C19%2C25.07H29.1l-15.92-20.8-5.84%2C6.65V272Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M41.62%2C262.21V240h5.43v22.39a4.67%2C4.67%2C0%2C0%2C0%2C2.35%2C4.19%2C11%2C11%2C0%2C0%2C0%2C11%2C0%2C4.69%2C4.69%2C0%2C0%2C0%2C2.33-4.19V240h5.43v22.19a9.08%2C9.08%2C0%2C0%2C1-4.1%2C7.87%2C16.2%2C16.2%2C0%2C0%2C1-18.37%2C0A9.07%2C9.07%2C0%2C0%2C1%2C41.62%2C262.21Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M77.46%2C272V224h5.43v16.81a29.29%2C29.29%2C0%2C0%2C1%2C9.32-1.73%2C13.1%2C13.1%2C0%2C0%2C1%2C6.2%2C1.41%2C10.71%2C10.71%2C0%2C0%2C1%2C4.18%2C3.74%2C18.07%2C18.07%2C0%2C0%2C1%2C2.23%2C5.06%2C21.26%2C21.26%2C0%2C0%2C1%2C.73%2C5.58q0%2C8.43-4.38%2C12.79T87.35%2C272Zm5.43-4.87h4.55q6.77%2C0%2C9.72-2.95t3-9.51a14.21%2C14.21%2C0%2C0%2C0-2-7.52%2C6.55%2C6.55%2C0%2C0%2C0-6-3.22%2C24.73%2C24.73%2C0%2C0%2C0-9.25%2C1.54Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M112.36%2C255.94q0-7.71%2C4.09-12.3a13.75%2C13.75%2C0%2C0%2C1%2C10.8-4.59q13.35%2C0%2C13.36%2C18.86H117.79a12.3%2C12.3%2C0%2C0%2C0%2C2.9%2C7.07q2.59%2C3.11%2C7.9%2C3.1a24.92%2C24.92%2C0%2C0%2C0%2C10.55-2v5a27.74%2C27.74%2C0%2C0%2C1-9.86%2C1.87%2C19.83%2C19.83%2C0%2C0%2C1-7.7-1.37%2C13.31%2C13.31%2C0%2C0%2C1-5.28-3.76%2C16.21%2C16.21%2C0%2C0%2C1-3-5.38A20.84%2C20.84%2C0%2C0%2C1%2C112.36%2C255.94Zm5.62-2.12h17.26a14.91%2C14.91%2C0%2C0%2C0-2.37-7.12%2C6.44%2C6.44%2C0%2C0%2C0-5.62-2.78%2C8.2%2C8.2%2C0%2C0%2C0-6.21%2C2.72A12.07%2C12.07%2C0%2C0%2C0%2C118%2C253.82Z%22%20fill%3D%22%234279f4%22%20stroke%3D%22%234279f4%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M147.32%2C244.89V240h5v-7.59a8.14%2C8.14%2C0%2C0%2C1%2C2.31-6.05%2C7.79%2C7.79%2C0%2C0%2C1%2C5.69-2.28h7.86V229h-5c-2.21%2C0-3.67.45-4.37%2C1.34s-1.06%2C2.55-1.06%2C5V240h8.46v4.87h-8.46V272h-5.44v-27.1Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M175.26%2C272V224h5.43v48Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M194.41%2C268.05a17.86%2C17.86%2C0%2C1%2C1%2C12.33%2C4.9A16.57%2C16.57%2C0%2C0%2C1%2C194.41%2C268.05Zm3.84-20.65a13.16%2C13.16%2C0%2C0%2C0%2C0%2C17.2%2C12.07%2C12.07%2C0%2C0%2C0%2C17%2C0%2C13.09%2C13.09%2C0%2C0%2C0%2C0-17.2%2C12.07%2C12.07%2C0%2C0%2C0-17%2C0Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3Cpath%20d%3D%22M228.45%2C240h5.75l7.3%2C25.32L248.93%2C240h5.36l7.34%2C25.34L269%2C240h5.74L264.7%2C272h-6.12l-6.83-24.58L245%2C272h-6.47Z%22%20fill%3D%22%230028aa%22%20stroke%3D%22%230028aa%22%20stroke-miterlimit%3D%2210%22%20stroke-width%3D%223.2%22%2F%3E%3C%2Fg%3E%3C%2Fg%3E%3C%2Fsvg%3E", diff --git a/elyra/tests/pipeline/resources/validation_pipelines/kf_with_parameters.pipeline b/elyra/tests/pipeline/resources/validation_pipelines/kf_with_parameters.pipeline index 9e7b0d52b..65251e5fb 100644 --- a/elyra/tests/pipeline/resources/validation_pipelines/kf_with_parameters.pipeline +++ b/elyra/tests/pipeline/resources/validation_pipelines/kf_with_parameters.pipeline @@ -47,7 +47,7 @@ "output_data": "" }, "label": "", - "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/1.6.0/components/web/Download/component.yaml", + "component_source": "https://raw.githubusercontent.com/kubeflow/pipelines/sdk-2.9.0/components/contrib/web/Download/component-sdk-v2.yaml", "ui_data": { "label": "Download data", "image": "/static/elyra/kubeflow.svg", diff --git a/elyra/tests/util/test_kubernetes.py b/elyra/tests/util/test_kubernetes.py index 497444231..6f3454b54 100644 --- a/elyra/tests/util/test_kubernetes.py +++ b/elyra/tests/util/test_kubernetes.py @@ -87,7 +87,7 @@ def test_is_valid_label_key_invalid_input(): assert not is_valid_label_key(key="/n") # prefix too short assert not is_valid_label_key(key="p/") # name too short assert not is_valid_label_key(key="a" * 254) # name too long - assert not is_valid_label_key(key=f"d/{'b' * 64}") # name too long + assert not is_valid_label_key(key=f"d / {'b' * 64}") # name too long # test first character violations (not alphanum) assert not is_valid_label_key(key="-a") assert not is_valid_label_key(key=".b") @@ -175,7 +175,7 @@ def test_is_valid_annotation_key_invalid_input(): assert not is_valid_annotation_key(key="/n") # prefix too short assert not is_valid_annotation_key(key="p/") # name too short assert not is_valid_annotation_key(key="a" * 254) # name too long - assert not is_valid_annotation_key(key=f"d/{'b' * 64}") # name too long + assert not is_valid_annotation_key(key=f"d / {'b' * 64}") # name too long # test first character violations (not alphanum) assert not is_valid_annotation_key(key="-a") assert not is_valid_annotation_key(key=".b") diff --git a/etc/docker/README.md b/etc/docker/README.md index 063fd6fdd..607808f6f 100644 --- a/etc/docker/README.md +++ b/etc/docker/README.md @@ -25,5 +25,5 @@ Builds the Elyra image for use as standalone or with JupyterHub. See [Deploying Elyra on OpenShift is available via Open Data Hub. Elyra Image is available via [quay.io](https://quay.io/repository/thoth-station/s2i-lab-elyra?tab=tags) and supporting image build files via [s2i-lab-elyra](https://github.com/opendatahub-io/s2i-lab-elyra) ### kubeflow (Open Data Hub) -Build the Elyra image for use with Kubeflow's Notebook Server. This image includes all kfp dependencies (i.e. kfp, kfp-tekton). See [Using Elyra with the Kubeflow Notebook Server](https://elyra.readthedocs.io/en/latest/recipes/using-elyra-with-kubeflow-notebook-server.html) +Build the Elyra image for use with Kubeflow's Notebook Server. This image includes all kfp dependencies. See [Using Elyra with the Kubeflow Notebook Server](https://elyra.readthedocs.io/en/latest/recipes/using-elyra-with-kubeflow-notebook-server.html) diff --git a/etc/docker/elyra/Dockerfile b/etc/docker/elyra/Dockerfile index 5fe8489ce..7603c2cfd 100644 --- a/etc/docker/elyra/Dockerfile +++ b/etc/docker/elyra/Dockerfile @@ -23,8 +23,8 @@ ARG TAG="dev" ARG ELYRA_VERSION ARG ELYRA_PACKAGE=elyra-"$ELYRA_VERSION"-py3-none-any.whl -# - Include with KFP Tekton support ('kfp-tekton') and component examples ('kfp-examples') -ARG ELYRA_EXTRAS=[kfp-tekton,kfp-examples,gitlab] +# - Include with KFP support and component examples ('kfp-examples') +ARG ELYRA_EXTRAS=[kfp,kfp-examples,gitlab] COPY requirements.txt ${ELYRA_PACKAGE} ./ diff --git a/etc/docker/kubeflow/Dockerfile b/etc/docker/kubeflow/Dockerfile index d3bc96e77..d4ec36f9a 100644 --- a/etc/docker/kubeflow/Dockerfile +++ b/etc/docker/kubeflow/Dockerfile @@ -23,8 +23,8 @@ ARG TAG="dev" ARG ELYRA_VERSION ARG ELYRA_PACKAGE=elyra-"$ELYRA_VERSION"-py3-none-any.whl -# - Include with KFP Tekton support ('kfp-tekton') and component examples ('kfp-examples') -ARG ELYRA_EXTRAS=[kfp-tekton,kfp-examples,gitlab] +# - Include with KFP support and component examples ('kfp-examples') +ARG ELYRA_EXTRAS=[kfp,kfp-examples,gitlab] # Includes the readme as a token file for COPY that always exists, otherwise production builds fail when whl not present COPY README.md ${ELYRA_PACKAGE}* ./ diff --git a/pyproject.toml b/pyproject.toml index 1ee46ce42..7b775eddc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,8 @@ dependencies = [ "yaspin", # see: https://stackoverflow.com/questions/76175487/sudden-importerror-cannot-import-name-appengine-from-requests-packages-urlli "appengine-python-standard", + "kfp==2.9.0", # Cap kfp for protobuf compatibility + "kfp-kubernetes>=1.0.0", "pygithub", "black>=22.8.0", ] @@ -86,7 +88,6 @@ test = [ "pytest_virtualenv", "requests-mock", "requests-unixsocket", - "kfp-tekton" ] airflow = [ @@ -95,21 +96,18 @@ airflow-gitlab = [ "python-gitlab" ] kfp = [ - "kfp>=1.7.0,<2.0,!=1.7.2", # We cap the SDK to <2.0 due to possible breaking changes + "kfp==2.9.0", # Cap kfp for protobuf compatibility "typing-extensions>=3.10,<5", # Cap from kfp "mistune<3.1.0", -] -kfp-tekton = [ - "kfp-tekton>=1.5.2", # requires kfp >= 1.8.19, which contains fix for Jupyterlab - "mistune<3.1.0", + "kfp-kubernetes>=1.0.0", ] kfp-examples = [ "elyra-examples-kfp-catalog", ] # The following is a collection of "non-test" extra dependencies from above. all = [ - "kfp>=1.7.0,<2.0,!=1.7.2", # We cap the SDK to <2.0 due to possible breaking changes - "kfp-tekton>=1.5.2", + "kfp==2.9.0", # Cap kfp for protobuf compatibility + "kfp-kubernetes>=1.0.0", "elyra-examples-kfp-catalog", "python-gitlab", "typing-extensions>=3.10,<5", # Cap from kfp diff --git a/test_requirements.txt b/test_requirements.txt index 206edfc4a..271d00c21 100644 --- a/test_requirements.txt +++ b/test_requirements.txt @@ -9,5 +9,4 @@ pytest_jupyter pytest-tornasync pytest_virtualenv requests-mock -requests-unixsocket -kfp-tekton +requests-unixsocket \ No newline at end of file